From e3f9e8ae663e93c21abceb4d2c78f49e5e02e7e8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 28 Oct 2024 15:43:12 +0100 Subject: [PATCH 001/121] upgrade reqs --- services/director-v2/requirements/_base.txt | 330 ++++++++++++++++++-- 1 file changed, 308 insertions(+), 22 deletions(-) diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index 42c06dd93de..3922513db61 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -25,20 +25,37 @@ aiofiles==23.2.1 # -r requirements/../../../packages/simcore-sdk/requirements/_base.in aiohttp==3.9.5 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -57,6 +74,8 @@ alembic==1.13.1 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # fast-depends @@ -94,20 +113,37 @@ blosc==1.11.1 # via -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt certifi==2024.2.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -156,28 +192,13 @@ email-validator==2.1.1 # pydantic fast-depends==2.4.12 # via faststream -fastapi==0.99.1 +fastapi==0.115.4 # via - # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator +fastapi-cli==0.0.5 + # via fastapi faststream==0.5.28 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -213,20 +234,37 @@ httptools==0.6.1 # via uvicorn httpx==0.27.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -249,20 +287,37 @@ itsdangerous==2.2.0 # via fastapi jinja2==3.1.4 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -287,20 +342,37 @@ lz4==4.3.3 # via -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt mako==1.3.5 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../requirements/constraints.txt @@ -390,28 +462,62 @@ ordered-set==4.1.0 # via -r requirements/_base.in orjson==3.10.3 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/_base.in # fastapi packaging==24.0 @@ -448,45 +554,120 @@ psycopg2-binary==2.9.9 # via # aiopg # sqlalchemy -pydantic==1.10.15 +pydantic==2.9.2 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/_base.in + # -r 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in # fast-depends # fastapi + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.23.4 + # via pydantic +pydantic-extra-types==2.9.0 + # via + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # fastapi +pydantic-settings==2.6.0 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in + # fastapi pygments==2.18.0 # via rich pyinstrument==4.6.2 @@ -497,7 +678,7 @@ python-dateutil==2.9.0.post0 # via arrow python-dotenv==1.0.1 # via - # pydantic + # pydantic-settings # uvicorn python-engineio==4.9.1 # via python-socketio @@ -507,20 +688,37 @@ python-socketio==5.11.2 # via -r requirements/_base.in pyyaml==6.0.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -534,20 +732,37 @@ pyyaml==6.0.1 # uvicorn redis==5.0.4 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../requirements/constraints.txt @@ -598,20 +813,37 @@ sortedcontainers==2.4.0 # distributed sqlalchemy==1.4.52 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -619,22 +851,39 @@ sqlalchemy==1.4.52 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiopg # alembic -starlette==0.27.0 +starlette==0.41.2 # via + # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -674,6 +923,7 @@ typer==0.12.3 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in + # fastapi-cli types-python-dateutil==2.9.0.20240316 # via arrow typing-extensions==4.11.0 @@ -688,43 
+938,78 @@ typing-extensions==4.11.0 # opentelemetry-sdk # pint # pydantic + # pydantic-core # typer ujson==5.10.0 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi urllib3==2.2.1 # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -735,6 +1020,7 @@ uvicorn==0.29.0 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cli uvloop==0.19.0 # via uvicorn watchfiles==0.21.0 From 3c9391a4b79e5296a8b6682aa2334da59f1534f7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 28 Oct 2024 16:10:21 +0100 Subject: [PATCH 002/121] run bump-pydantic --- .../core/dynamic_services_settings/sidecar.py | 64 +++++++++-------- .../core/settings.py | 66 ++++++++++++------ .../models/comp_pipelines.py | 18 ++--- .../models/comp_runs.py | 29 ++++---- .../models/comp_tasks.py | 58 ++++++++-------- .../models/dynamic_services_scheduler.py | 69 ++++++++----------- .../models/pricing.py | 9 ++- 7 files changed, 168 
insertions(+), 145 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py index 98ce21fc6a4..434c3e0941f 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/sidecar.py @@ -9,7 +9,7 @@ ensure_unique_dict_values_validator, ensure_unique_list_values_validator, ) -from pydantic import Field, PositiveInt, validator +from pydantic import AliasChoices, Field, PositiveInt, ValidationInfo, field_validator from settings_library.aws_s3_cli import AwsS3CliSettings from settings_library.base import BaseCustomSettings from settings_library.efs import AwsEfsSettings @@ -45,10 +45,10 @@ class RCloneSettings(SettingsLibraryRCloneSettings): description="VFS operation mode, defines how and when the disk cache is synced", ) - @validator("R_CLONE_POLL_INTERVAL_SECONDS") + @field_validator("R_CLONE_POLL_INTERVAL_SECONDS") @classmethod - def enforce_r_clone_requirement(cls, v: int, values) -> PositiveInt: - dir_cache_time = values["R_CLONE_DIR_CACHE_TIME_SECONDS"] + def enforce_r_clone_requirement(cls, v: int, info: ValidationInfo) -> PositiveInt: + dir_cache_time = info.data["R_CLONE_DIR_CACHE_TIME_SECONDS"] if v >= dir_cache_time: msg = f"R_CLONE_POLL_INTERVAL_SECONDS={v} must be lower than R_CLONE_DIR_CACHE_TIME_SECONDS={dir_cache_time}" raise ValueError(msg) @@ -60,7 +60,7 @@ class PlacementSettings(BaseCustomSettings): # https://docs.docker.com/engine/swarm/services/#control-service-placement. 
DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS: list[DockerPlacementConstraint] = Field( default_factory=list, - example='["node.labels.region==east", "one!=yes"]', + examples=['["node.labels.region==east", "one!=yes"]'], ) DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[ @@ -72,20 +72,18 @@ class PlacementSettings(BaseCustomSettings): "see https://github.com/ITISFoundation/osparc-simcore/issues/5250 " "When `None` (default), uses generic resources" ), - example='{"AIRAM": "node.labels.custom==true"}', + examples=['{"AIRAM": "node.labels.custom==true"}'], ) - _unique_custom_constraints = validator( + _unique_custom_constraints = field_validator( "DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS", - allow_reuse=True, )(ensure_unique_list_values_validator) - _unique_resource_placement_constraints_substitutions = validator( + _unique_resource_placement_constraints_substitutions = field_validator( "DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS", - allow_reuse=True, )(ensure_unique_dict_values_validator) - @validator("DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") + @field_validator("DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") @classmethod def warn_if_any_values_provided(cls, value: dict) -> dict: if len(value) > 0: @@ -101,40 +99,46 @@ def warn_if_any_values_provided(cls, value: dict) -> dict: class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED: bool = Field( # doc: https://docs.docker.com/engine/swarm/networking/#configure-service-discovery default=False, - env=["DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED"], + validation_alias=AliasChoices( + "DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED" + ), description="dynamic-sidecar's service 'endpoint_spec' with {'Mode': 'dnsrr'}", ) DYNAMIC_SIDECAR_SC_BOOT_MODE: BootModeEnum = Field( ..., description="Boot mode used for the dynamic-sidecar services" "By defaults, it uses 
the same boot mode set for the director-v2", - env=["DYNAMIC_SIDECAR_SC_BOOT_MODE", "SC_BOOT_MODE"], + validation_alias=AliasChoices("DYNAMIC_SIDECAR_SC_BOOT_MODE", "SC_BOOT_MODE"), ) DYNAMIC_SIDECAR_LOG_LEVEL: str = Field( "WARNING", description="log level of the dynamic sidecar" "If defined, it captures global env vars LOG_LEVEL and LOGLEVEL from the director-v2 service", - env=["DYNAMIC_SIDECAR_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices( + "DYNAMIC_SIDECAR_LOG_LEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ) DYNAMIC_SIDECAR_IMAGE: str = Field( ..., - regex=DYNAMIC_SIDECAR_DOCKER_IMAGE_RE, + pattern=DYNAMIC_SIDECAR_DOCKER_IMAGE_RE, description="used by the director to start a specific version of the dynamic-sidecar", ) - DYNAMIC_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field(auto_default_from_env=True) + DYNAMIC_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS: AwsS3CliSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_EFS_SETTINGS: AwsEfsSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_PLACEMENT_SETTINGS: PlacementSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) # @@ -144,7 +148,7 @@ class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): DYNAMIC_SIDECAR_MOUNT_PATH_DEV: Path | None = Field( None, description="Host path to the dynamic-sidecar project. 
Used as source path to mount to the dynamic-sidecar [DEVELOPMENT ONLY]", - example="osparc-simcore/services/dynamic-sidecar", + examples=["osparc-simcore/services/dynamic-sidecar"], ) DYNAMIC_SIDECAR_PORT: PortInt = Field( @@ -157,12 +161,16 @@ class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): description="Publishes the service on localhost for debuging and testing [DEVELOPMENT ONLY]" "Can be used to access swagger doc from the host as http://127.0.0.1:30023/dev/doc " "where 30023 is the host published port", + validate_default=True, ) - @validator("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", pre=True) + @field_validator("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", mode="before") @classmethod - def auto_disable_if_production(cls, v, values): - if v and values.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") == BootModeEnum.PRODUCTION: + def auto_disable_if_production(cls, v, info: ValidationInfo): + if ( + v + and info.data.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") == BootModeEnum.PRODUCTION + ): _logger.warning( "In production DYNAMIC_SIDECAR_MOUNT_PATH_DEV cannot be set to %s, enforcing None", v, @@ -170,22 +178,22 @@ def auto_disable_if_production(cls, v, values): return None return v - @validator("DYNAMIC_SIDECAR_EXPOSE_PORT", pre=True, always=True) + @field_validator("DYNAMIC_SIDECAR_EXPOSE_PORT", mode="before") @classmethod - def auto_enable_if_development(cls, v, values): + def auto_enable_if_development(cls, v, info: ValidationInfo): if ( - boot_mode := values.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") + boot_mode := info.data.get("DYNAMIC_SIDECAR_SC_BOOT_MODE") ) and boot_mode.is_devel_mode(): # Can be used to access swagger doc from the host as http://127.0.0.1:30023/dev/doc return True return v - @validator("DYNAMIC_SIDECAR_IMAGE", pre=True) + @field_validator("DYNAMIC_SIDECAR_IMAGE", mode="before") @classmethod def strip_leading_slashes(cls, v: str) -> str: return v.lstrip("/") - @validator("DYNAMIC_SIDECAR_LOG_LEVEL") + @field_validator("DYNAMIC_SIDECAR_LOG_LEVEL") @classmethod 
def _validate_log_level(cls, value) -> str: log_level: str = cls.validate_log_level(value) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index 3c63028747b..4fb86bd2029 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -20,7 +20,14 @@ ClusterTypeInModel, NoAuthentication, ) -from pydantic import AnyHttpUrl, AnyUrl, Field, NonNegativeInt, validator +from pydantic import ( + AliasChoices, + AnyHttpUrl, + AnyUrl, + Field, + NonNegativeInt, + field_validator, +) from settings_library.base import BaseCustomSettings from settings_library.catalog import CatalogSettings from settings_library.docker_registry import RegistrySettings @@ -58,7 +65,7 @@ def endpoint(self) -> str: url: str = AnyHttpUrl.build( scheme="http", host=self.DIRECTOR_HOST, - port=f"{self.DIRECTOR_PORT}", + port=self.DIRECTOR_PORT, path=f"/{self.DIRECTOR_V0_VTAG}", ) return url @@ -107,7 +114,7 @@ def default_cluster(self) -> Cluster: type=ClusterTypeInModel.ON_PREMISE, ) - @validator("COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH", pre=True) + @field_validator("COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH", mode="before") @classmethod def _empty_auth_is_none(cls, v): if not v: @@ -122,14 +129,14 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): LOG_LEVEL: LogLevel = Field( LogLevel.INFO.value, - env=["DIRECTOR_V2_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], + validation_alias=AliasChoices("DIRECTOR_V2_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), ) DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( default=False, - env=[ + validation_alias=AliasChoices( "DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", - ], + ), description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ) DIRECTOR_V2_DEV_FEATURES_ENABLED: bool = False @@ -161,7 +168,9 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None # extras - SWARM_STACK_NAME: str = Field("undefined-please-check", env="SWARM_STACK_NAME") + SWARM_STACK_NAME: str = Field( + "undefined-please-check", validation_alias="SWARM_STACK_NAME" + ) SERVICE_TRACKING_HEARTBEAT: datetime.timedelta = Field( default=DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL, description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)" @@ -183,34 +192,48 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): ) # debug settings - CLIENT_REQUEST: ClientRequestSettings = Field(auto_default_from_env=True) + CLIENT_REQUEST: ClientRequestSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) # App modules settings --------------------- - DIRECTOR_V2_STORAGE: StorageSettings = Field(auto_default_from_env=True) + DIRECTOR_V2_STORAGE: StorageSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH: StorageAuthSettings | None = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) - DIRECTOR_V2_CATALOG: CatalogSettings | None = Field(auto_default_from_env=True) + DIRECTOR_V2_CATALOG: CatalogSettings | None = Field( + json_schema_extra={"auto_default_from_env": True} + ) - DIRECTOR_V0: DirectorV0Settings = Field(auto_default_from_env=True) + DIRECTOR_V0: DirectorV0Settings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - DYNAMIC_SERVICES: DynamicServicesSettings = Field(auto_default_from_env=True) + DYNAMIC_SERVICES: DynamicServicesSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) - POSTGRES: PostgresSettings = Field(auto_default_from_env=True) + POSTGRES: PostgresSettings = Field( + 
json_schema_extra={"auto_default_from_env": True} + ) - REDIS: RedisSettings = Field(auto_default_from_env=True) + REDIS: RedisSettings = Field(json_schema_extra={"auto_default_from_env": True}) - DIRECTOR_V2_RABBITMQ: RabbitSettings = Field(auto_default_from_env=True) + DIRECTOR_V2_RABBITMQ: RabbitSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) TRAEFIK_SIMCORE_ZONE: str = Field("internal_simcore_stack") DIRECTOR_V2_COMPUTATIONAL_BACKEND: ComputationalBackendSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DIRECTOR_V2_DOCKER_REGISTRY: RegistrySettings = Field( - auto_default_from_env=True, + json_schema_extra={"auto_default_from_env": True}, description="settings for the private registry deployed with the platform", ) DIRECTOR_V2_DOCKER_HUB_REGISTRY: RegistrySettings | None = Field( @@ -218,7 +241,7 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): ) DIRECTOR_V2_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field( - auto_default_from_env=True, + json_schema_extra={"auto_default_from_env": True}, description="resource usage tracker service client's plugin", ) @@ -227,10 +250,11 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): description="Base URL used to access the public api e.g. 
http://127.0.0.1:6000 for development or https://api.osparc.io", ) DIRECTOR_V2_TRACING: TracingSettings | None = Field( - auto_default_from_env=True, description="settings for opentelemetry tracing" + json_schema_extra={"auto_default_from_env": True}, + description="settings for opentelemetry tracing", ) - @validator("LOG_LEVEL", pre=True) + @field_validator("LOG_LEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: str) -> str: log_level: str = cls.validate_log_level(value) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py b/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py index 6e156607ae6..5de823d826b 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_pipelines.py @@ -1,10 +1,10 @@ from contextlib import suppress -from typing import Any, ClassVar, cast +from typing import cast import networkx as nx from models_library.projects import ProjectID from models_library.projects_state import RunningState -from pydantic import BaseModel, validator +from pydantic import BaseModel, ConfigDict, field_validator from simcore_postgres_database.models.comp_pipeline import StateType from ..utils.db import DB_TO_RUNNING_STATE @@ -15,7 +15,7 @@ class CompPipelineAtDB(BaseModel): dag_adjacency_list: dict[str, list[str]] # json serialization issue if using NodeID state: RunningState - @validator("state", pre=True) + @field_validator("state", mode="before") @classmethod def convert_state_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): @@ -27,7 +27,7 @@ def convert_state_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @validator("dag_adjacency_list", pre=True) + @field_validator("dag_adjacency_list", mode="before") @classmethod def auto_convert_dag(cls, v): # this enforcement is here because the serialization using 
json is not happy with non str Dict keys, also comparison gets funny if the lists are having sometimes UUIDs or str. @@ -42,10 +42,9 @@ def get_graph(self) -> nx.DiGraph: ), ) - class Config: - orm_mode = True - - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "examples": [ # DB model { @@ -60,4 +59,5 @@ class Config: "state": "NOT_STARTED", } ] - } + }, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index 1d7800b9788..f15de72866f 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -1,13 +1,13 @@ import datetime from contextlib import suppress -from typing import Any, ClassVar, TypedDict +from typing import TypedDict from models_library.clusters import DEFAULT_CLUSTER_ID, ClusterID from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState from models_library.users import UserID -from pydantic import BaseModel, PositiveInt, validator +from pydantic import BaseModel, ConfigDict, PositiveInt, field_validator from simcore_postgres_database.models.comp_pipeline import StateType from ..utils.db import DB_TO_RUNNING_STATE @@ -39,17 +39,17 @@ class CompRunsAtDB(BaseModel): run_id: PositiveInt project_uuid: ProjectID user_id: UserID - cluster_id: ClusterID | None + cluster_id: ClusterID | None = None iteration: PositiveInt result: RunningState created: datetime.datetime modified: datetime.datetime - started: datetime.datetime | None - ended: datetime.datetime | None + started: datetime.datetime | None = None + ended: datetime.datetime | None = None metadata: RunMetadataDict = RunMetadataDict() use_on_demand_clusters: bool - @validator("result", pre=True) + 
@field_validator("result", mode="before") @classmethod def convert_result_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): @@ -61,30 +61,30 @@ def convert_result_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @validator("cluster_id", pre=True) + @field_validator("cluster_id", mode="before") @classmethod def convert_null_to_default_cluster_id(cls, v): if v is None: v = DEFAULT_CLUSTER_ID return v - @validator("created", "modified", "started", "ended") + @field_validator("created", "modified", "started", "ended") @classmethod def ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: if v is not None and v.tzinfo is None: - v = v.replace(tzinfo=datetime.timezone.utc) + v = v.replace(tzinfo=datetime.UTC) return v - @validator("metadata", pre=True) + @field_validator("metadata", mode="before") @classmethod def convert_null_to_empty_metadata(cls, v): if v is None: v = RunMetadataDict() return v - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "examples": [ # DB model { @@ -119,4 +119,5 @@ class Config: "use_on_demand_clusters": False, }, ] - } + }, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 5895411b2d0..d4d99c83c08 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -1,6 +1,6 @@ import datetime from contextlib import suppress -from typing import Any, ClassVar +from typing import Any from dask_task_models_library.container_tasks.protocol import ContainerEnvsDict from models_library.api_schemas_directorv2.services import NodeRequirements @@ -17,11 +17,11 @@ from pydantic import ( BaseModel, ByteSize, - Extra, + ConfigDict, Field, 
PositiveInt, + field_validator, parse_obj_as, - validator, ) from simcore_postgres_database.models.comp_pipeline import StateType from simcore_postgres_database.models.comp_tasks import NodeClass @@ -30,8 +30,8 @@ class Image(BaseModel): - name: str = Field(..., regex=SERVICE_KEY_RE.pattern) - tag: str = Field(..., regex=SIMPLE_VERSION_RE) + name: str = Field(..., pattern=SERVICE_KEY_RE.pattern) + tag: str = Field(..., pattern=SIMPLE_VERSION_RE) requires_gpu: bool | None = Field( default=None, deprecated=True, description="Use instead node_requirements" @@ -40,7 +40,9 @@ class Image(BaseModel): default=None, deprecated=True, description="Use instead node_requirements" ) node_requirements: NodeRequirements | None = Field( - default=None, description="the requirements for the service to run on a node" + default=None, + description="the requirements for the service to run on a node", + validate_default=True, ) boot_mode: BootMode = BootMode.CPU command: list[str] = Field( @@ -53,7 +55,7 @@ class Image(BaseModel): default_factory=dict, description="The environment to use to run the service" ) - @validator("node_requirements", pre=True, always=True) + @field_validator("node_requirements", mode="before") @classmethod def migrate_from_requirements(cls, v, values): if v is None: @@ -68,9 +70,9 @@ def migrate_from_requirements(cls, v, values): ) return v - class Config: - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ "examples": [ { "name": "simcore/services/dynamic/jupyter-octave-python-math", @@ -89,14 +91,14 @@ class Config: "requires_mpi": False, } ] - } + }, + ) -# NOTE: for a long time defaultValue field was added to ServiceOutput wrongly in the DB. -# this flags allows parsing of the outputs without error. This MUST not leave the director-v2! 
class _ServiceOutputOverride(ServiceOutput): - class Config(ServiceOutput.Config): - extra = Extra.ignore + # NOTE: for a long time defaultValue field was added to ServiceOutput wrongly in the DB. + # this flags allows parsing of the outputs without error. This MUST not leave the director-v2! + model_config = ConfigDict(extra="ignore") _ServiceOutputsOverride = dict[ServicePortKey, _ServiceOutputOverride] @@ -105,10 +107,7 @@ class Config(ServiceOutput.Config): class NodeSchema(BaseModel): inputs: ServiceInputsDict = Field(..., description="the inputs scheam") outputs: _ServiceOutputsOverride = Field(..., description="the outputs schema") - - class Config: - extra = Extra.forbid - orm_mode = True + model_config = ConfigDict(extra="forbid", from_attributes=True) class CompTaskAtDB(BaseModel): @@ -145,10 +144,10 @@ class CompTaskAtDB(BaseModel): created: datetime.datetime modified: datetime.datetime # Additional information about price and hardware (ex. AWS EC2 instance type) - pricing_info: dict | None + pricing_info: dict | None = None hardware_info: HardwareInfo - @validator("state", pre=True) + @field_validator("state", mode="before") @classmethod def convert_state_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): @@ -160,14 +159,14 @@ def convert_state_from_state_type_enum_if_needed(cls, v): return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) return v - @validator("start", "end", "submit") + @field_validator("start", "end", "submit") @classmethod def ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: if v is not None and v.tzinfo is None: - v = v.replace(tzinfo=datetime.timezone.utc) + v = v.replace(tzinfo=datetime.UTC) return v - @validator("hardware_info", pre=True) + @field_validator("hardware_info", mode="before") @classmethod def backward_compatible_null_value(cls, v: HardwareInfo | None) -> HardwareInfo: if v is None: @@ -180,10 +179,10 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: 
comp_task_dict["state"] = RUNNING_STATE_TO_DB[comp_task_dict["state"]].value return comp_task_dict - class Config: - extra = Extra.forbid - orm_mode = True - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + extra="forbid", + from_attributes=True, + json_schema_extra={ "examples": [ # DB model { @@ -239,4 +238,5 @@ class Config: } for image_example in Image.Config.schema_extra["examples"] ] - } + }, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index 17408a202ec..5711cec6eea 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -1,12 +1,11 @@ import json import logging -import re from collections.abc import Mapping from datetime import datetime from enum import Enum from functools import cached_property from pathlib import Path -from typing import Any, TypeAlias +from typing import Annotated, Any, TypeAlias from uuid import UUID import arrow @@ -31,11 +30,11 @@ from pydantic import ( AnyHttpUrl, BaseModel, - ConstrainedStr, - Extra, + ConfigDict, Field, - parse_obj_as, - validator, + StringConstraints, + TypeAdapter, + field_validator, ) from servicelib.exception_utils import DelayedExceptionHandler @@ -55,18 +54,17 @@ DockerStatus: TypeAlias = Status2 -class DockerId(ConstrainedStr): - max_length = 25 - regex = re.compile(r"[A-Za-z0-9]{25}") - +DockerId: TypeAlias = Annotated[ + str, StringConstraints(max_length=25, pattern=r"[A-Za-z0-9]{25}") +] ServiceId: TypeAlias = DockerId NetworkId: TypeAlias = DockerId -class ServiceName(ConstrainedStr): - strip_whitespace = True - min_length = 2 +ServiceName: TypeAlias = Annotated[ + str, StringConstraints(min_length=2, strip_whitespace=True) +] logger = logging.getLogger() @@ -141,9 +139,9 @@ def 
from_container(cls, container: dict[str, Any]) -> "DockerContainerInspect": id=container["Id"], ) - class Config: - keep_untouched = (cached_property,) - allow_mutation = False + # TODO[pydantic]: The following keys were removed: `allow_mutation`. + # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information. + model_config = ConfigDict(ignored_types=(cached_property,), allow_mutation=False) class ServiceRemovalState(BaseModel): @@ -317,9 +315,7 @@ def compose_spec_submitted(self) -> bool: "this value will be set to None." ), ) - - class Config: - validate_assignment = True + model_config = ConfigDict(validate_assignment=True) class DynamicSidecarNamesHelper(BaseModel): @@ -337,25 +333,25 @@ class DynamicSidecarNamesHelper(BaseModel): service_name_dynamic_sidecar: str = Field( ..., - regex=REGEX_DY_SERVICE_SIDECAR, + pattern=REGEX_DY_SERVICE_SIDECAR, max_length=MAX_ALLOWED_SERVICE_NAME_LENGTH, description="unique name of the dynamic-sidecar service", ) proxy_service_name: str = Field( ..., - regex=REGEX_DY_SERVICE_PROXY, + pattern=REGEX_DY_SERVICE_PROXY, max_length=MAX_ALLOWED_SERVICE_NAME_LENGTH, description="name of the proxy for the dynamic-sidecar", ) simcore_traefik_zone: str = Field( ..., - regex=REGEX_DY_SERVICE_SIDECAR, + pattern=REGEX_DY_SERVICE_SIDECAR, description="unique name for the traefik constraints", ) dynamic_sidecar_network_name: str = Field( ..., - regex=REGEX_DY_SERVICE_SIDECAR, + pattern=REGEX_DY_SERVICE_SIDECAR, description="based on the node_id and project_id", ) @@ -392,15 +388,13 @@ class SchedulerData(CommonServiceDetails, DynamicSidecarServiceLabels): hostname: str = Field( ..., description="dy-sidecar's service hostname (provided by docker-swarm)" ) - port: PortInt = Field( - default=parse_obj_as(PortInt, 8000), description="dynamic-sidecar port" - ) + port: PortInt = Field(default=8000, description="dynamic-sidecar port") @property def endpoint(self) -> AnyHttpUrl: """endpoint where all the services are 
exposed""" - url: AnyHttpUrl = parse_obj_as( - AnyHttpUrl, f"http://{self.hostname}:{self.port}" # NOSONAR + url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( + f"http://{self.hostname}:{self.port}" # NOSONAR ) return url @@ -425,7 +419,7 @@ def endpoint(self) -> AnyHttpUrl: ) service_port: PortInt = Field( - default=parse_obj_as(PortInt, TEMPORARY_PORT_NUMBER), + default=TEMPORARY_PORT_NUMBER, description=( "port where the service is exposed defined by the service; " "NOTE: temporary default because it will be changed once the service " @@ -470,8 +464,7 @@ def endpoint(self) -> AnyHttpUrl: def get_proxy_endpoint(self) -> AnyHttpUrl: """get the endpoint where the proxy's admin API is exposed""" assert self.proxy_admin_api_port # nosec - url: AnyHttpUrl = parse_obj_as( - AnyHttpUrl, + url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( f"http://{self.proxy_service_name}:{self.proxy_admin_api_port}", # nosec # NOSONAR ) return url @@ -528,9 +521,9 @@ def from_http_request( } if run_id: obj_dict["run_id"] = run_id - return cls.parse_obj(obj_dict) + return cls.model_validate(obj_dict) - @validator("user_preferences_path", pre=True) + @field_validator("user_preferences_path", mode="before") @classmethod def strip_path_serialization_to_none(cls, v): if v == "None": @@ -542,15 +535,13 @@ def from_service_inspect( cls, service_inspect: Mapping[str, Any] ) -> "SchedulerData": labels = service_inspect["Spec"]["Labels"] - return cls.parse_raw(labels[DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL]) + return cls.model_validate_json(labels[DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL]) def as_label_data(self) -> str: # compose_spec needs to be json encoded before encoding it to json # and storing it in the label - return self.copy( + return self.model_copy( update={"compose_spec": json.dumps(self.compose_spec)}, deep=True - ).json() + ).model_dump_json() - class Config: - extra = Extra.allow - allow_population_by_field_name = True + model_config = ConfigDict(extra="allow", 
populate_by_name=True) diff --git a/services/director-v2/src/simcore_service_director_v2/models/pricing.py b/services/director-v2/src/simcore_service_director_v2/models/pricing.py index 4aabef7cd10..52a61d8c9e3 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/pricing.py +++ b/services/director-v2/src/simcore_service_director_v2/models/pricing.py @@ -1,12 +1,11 @@ from decimal import Decimal -from typing import Any, ClassVar from models_library.resource_tracker import ( PricingPlanId, PricingUnitCostId, PricingUnitId, ) -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict class PricingInfo(BaseModel): @@ -14,9 +13,8 @@ class PricingInfo(BaseModel): pricing_unit_id: PricingUnitId pricing_unit_cost_id: PricingUnitCostId pricing_unit_cost: Decimal - - class Config: - schema_extra: ClassVar[dict[str, Any]] = { + model_config = ConfigDict( + json_schema_extra={ "examples": [ { "pricing_plan_id": 1, @@ -26,3 +24,4 @@ class Config: } ] } + ) From 6ac07508b8fe86e253d8c02fc9f325241fbe2cfc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 28 Oct 2024 16:37:48 +0100 Subject: [PATCH 003/121] continue fixing --- .../models/comp_runs.py | 2 +- .../models/comp_tasks.py | 10 +- services/director-v2/tests/conftest.py | 4 +- .../tests/integration/02/conftest.py | 4 +- services/director-v2/tests/unit/conftest.py | 4 +- .../tests/unit/test_modules_dask_client.py | 4 +- ...es_dynamic_sidecar_docker_compose_specs.py | 2 +- .../test_modules_dynamic_sidecar_scheduler.py | 6 +- .../tests/unit/test_utils_comp_scheduler.py | 2 +- .../tests/unit/with_dbs/conftest.py | 2 +- .../with_dbs/test_api_route_computations.py | 34 +++-- .../test_api_route_dynamic_services.py | 125 +++++++++++++----- .../tests/unit/with_dbs/test_cli.py | 4 +- ...test_modules_dynamic_sidecar_docker_api.py | 4 +- ...es_dynamic_sidecar_docker_service_specs.py | 24 ++-- .../tests/unit/with_dbs/test_utils_dask.py | 2 +- 16 files changed, 166 insertions(+), 67 deletions(-) 
diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index f15de72866f..c14d6d6a547 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -1,6 +1,5 @@ import datetime from contextlib import suppress -from typing import TypedDict from models_library.clusters import DEFAULT_CLUSTER_ID, ClusterID from models_library.projects import ProjectID @@ -9,6 +8,7 @@ from models_library.users import UserID from pydantic import BaseModel, ConfigDict, PositiveInt, field_validator from simcore_postgres_database.models.comp_pipeline import StateType +from typing_extensions import TypedDict from ..utils.db import DB_TO_RUNNING_STATE diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index d4d99c83c08..844919cbde9 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -79,7 +79,9 @@ def migrate_from_requirements(cls, v, values): "tag": "1.3.1", "node_requirements": node_req_example, } - for node_req_example in NodeRequirements.Config.schema_extra["examples"] + for node_req_example in NodeRequirements.model_config[ + "json_schema_extra" + ]["examples"] ] + # old version @@ -234,9 +236,11 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: "pricing_unit_id": 1, "pricing_unit_cost_id": 1, }, - "hardware_info": HardwareInfo.Config.schema_extra["examples"][0], + "hardware_info": HardwareInfo.model_config["json_schema_extra"][ + "examples" + ][0], } - for image_example in Image.Config.schema_extra["examples"] + for image_example in Image.model_config["json_schema_extra"]["examples"] ] }, ) diff --git 
a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index db64158d6d5..a1ec0c8bd4d 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -336,7 +336,9 @@ async def wrapper(*args, **kwargs): @pytest.fixture def mock_osparc_variables_api_auth_rpc(mocker: MockerFixture) -> None: - fake_data = ApiKeyGet.parse_obj(ApiKeyGet.Config.schema_extra["examples"][0]) + fake_data = ApiKeyGet.parse_obj( + ApiKeyGet.model_config["json_schema_extra"]["examples"][0] + ) async def _create( app: FastAPI, diff --git a/services/director-v2/tests/integration/02/conftest.py b/services/director-v2/tests/integration/02/conftest.py index 0d0df8a402f..6bde0a5d6f5 100644 --- a/services/director-v2/tests/integration/02/conftest.py +++ b/services/director-v2/tests/integration/02/conftest.py @@ -74,7 +74,7 @@ def mock_projects_networks_repository(mocker: MockerFixture) -> None: def service_resources() -> ServiceResourcesDict: return parse_obj_as( ServiceResourcesDict, - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0], + ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], ) @@ -82,7 +82,7 @@ def service_resources() -> ServiceResourcesDict: def mock_resource_usage_tracker(mocker: MockerFixture) -> None: base_module = "simcore_service_director_v2.modules.resource_usage_tracker_client" service_pricing_plan = PricingPlanGet.parse_obj( - PricingPlanGet.Config.schema_extra["examples"][1] + PricingPlanGet.model_config["json_schema_extra"]["examples"][1] ) for unit in service_pricing_plan.pricing_units: unit.specific_info.aws_ec2_instances.clear() diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index ecd7da59544..2e53bf1fab8 100644 --- a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -53,7 +53,7 @@ def simcore_services_network_name() -> str: @pytest.fixture def simcore_service_labels() 
-> SimcoreServiceLabels: simcore_service_labels = SimcoreServiceLabels.parse_obj( - SimcoreServiceLabels.Config.schema_extra["examples"][1] + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][1] ) simcore_service_labels.callbacks_mapping = parse_obj_as(CallbacksMapping, {}) return simcore_service_labels @@ -62,7 +62,7 @@ def simcore_service_labels() -> SimcoreServiceLabels: @pytest.fixture def dynamic_service_create() -> DynamicServiceCreate: return DynamicServiceCreate.parse_obj( - DynamicServiceCreate.Config.schema_extra["example"] + DynamicServiceCreate.model_config["json_schema_extra"]["example"] ) diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index f63381c538b..15720935aae 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -482,7 +482,9 @@ def task_labels(comp_run_metadata: RunMetadataDict) -> ContainerLabelsDict: @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.parse_obj(HardwareInfo.Config.schema_extra["examples"][0]) + return HardwareInfo.parse_obj( + HardwareInfo.model_config["json_schema_extra"]["examples"][0] + ) @pytest.fixture diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py index 8b390e7b973..d06444cf155 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py @@ -156,7 +156,7 @@ async def test_inject_resource_limits_and_reservations( [ pytest.param( json.loads( - SimcoreServiceLabels.Config.schema_extra["examples"][2][ + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2][ "simcore.service.compose-spec" ] ), diff --git 
a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py index 13d617ed82d..13763953d07 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py @@ -162,7 +162,11 @@ def mocked_director_v0( ), name="service labels", ).respond( - json={"data": SimcoreServiceLabels.Config.schema_extra["examples"][0]} + json={ + "data": SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0] + } ) yield mock diff --git a/services/director-v2/tests/unit/test_utils_comp_scheduler.py b/services/director-v2/tests/unit/test_utils_comp_scheduler.py index 970cdad75b7..ff636be1736 100644 --- a/services/director-v2/tests/unit/test_utils_comp_scheduler.py +++ b/services/director-v2/tests/unit/test_utils_comp_scheduler.py @@ -79,7 +79,7 @@ def test_get_resource_tracking_run_id( "task", [ CompTaskAtDB.parse_obj(example) - for example in CompTaskAtDB.Config.schema_extra["examples"] + for example in CompTaskAtDB.model_config["json_schema_extra"]["examples"] ], ids=str, ) diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 8dd5527f00a..c508f8e7ada 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -223,7 +223,7 @@ def cluster( created_cluster_ids: list[str] = [] def creator(user: dict[str, Any], **cluster_kwargs) -> Cluster: - cluster_config = Cluster.Config.schema_extra["examples"][1] + cluster_config = Cluster.model_config["json_schema_extra"]["examples"][1] cluster_config["owner"] = user["primary_gid"] cluster_config.update(**cluster_kwargs) new_cluster = Cluster.parse_obj(cluster_config) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py 
b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 81034fbaee5..73a5ca6b447 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -109,7 +109,7 @@ def fake_service_details(mocks_dir: Path) -> ServiceMetaDataPublished: @pytest.fixture def fake_service_extras() -> ServiceExtras: - extra_example = ServiceExtras.Config.schema_extra["examples"][2] + extra_example = ServiceExtras.model_config["json_schema_extra"]["examples"][2] random_extras = ServiceExtras(**extra_example) assert random_extras is not None return random_extras @@ -119,13 +119,15 @@ def fake_service_extras() -> ServiceExtras: def fake_service_resources() -> ServiceResourcesDict: return parse_obj_as( ServiceResourcesDict, - ServiceResourcesDictHelpers.Config.schema_extra["examples"][0], + ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], ) @pytest.fixture def fake_service_labels() -> dict[str, Any]: - return choice(SimcoreServiceLabels.Config.schema_extra["examples"]) # noqa: S311 + return choice( + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"] + ) # noqa: S311 @pytest.fixture @@ -257,7 +259,7 @@ def _mocked_services_details( @pytest.fixture( - params=PricingPlanGet.Config.schema_extra["examples"], + params=PricingPlanGet.model_config["json_schema_extra"]["examples"], ids=["with ec2 restriction", "without"], ) def default_pricing_plan(request: pytest.FixtureRequest) -> PricingPlanGet: @@ -301,7 +303,7 @@ def _mocked_get_pricing_unit(request, pricing_plan_id: int) -> httpx.Response: ( default_pricing_plan.pricing_units[0] if default_pricing_plan.pricing_units - else PricingUnitGet.Config.schema_extra["examples"][0] + else PricingUnitGet.model_config["json_schema_extra"]["examples"][0] ), by_alias=True, ), @@ -461,7 +463,9 @@ def mocked_clusters_keeper_service_get_instance_type_details_with_invalid_name( ) 
-@pytest.fixture(params=ServiceResourcesDictHelpers.Config.schema_extra["examples"]) +@pytest.fixture( + params=ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"] +) def project_nodes_overrides(request: pytest.FixtureRequest) -> dict[str, Any]: return request.param @@ -570,7 +574,11 @@ async def test_create_computation_with_wallet( @pytest.mark.parametrize( "default_pricing_plan", - [PricingPlanGet.construct(**PricingPlanGet.Config.schema_extra["examples"][0])], + [ + PricingPlanGet.construct( + **PricingPlanGet.model_config["json_schema_extra"]["examples"][0] + ) + ], ) async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_raises_422( minimal_configuration: None, @@ -608,7 +616,11 @@ async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_rai @pytest.mark.parametrize( "default_pricing_plan", - [PricingPlanGet.construct(**PricingPlanGet.Config.schema_extra["examples"][0])], + [ + PricingPlanGet.construct( + **PricingPlanGet.model_config["json_schema_extra"]["examples"][0] + ) + ], ) async def test_create_computation_with_wallet_with_no_clusters_keeper_raises_503( minimal_configuration: None, @@ -709,9 +721,9 @@ async def test_start_computation_with_project_node_resources_defined( proj = await project( user, project_nodes_overrides={ - "required_resources": ServiceResourcesDictHelpers.Config.schema_extra[ - "examples" - ][0] + "required_resources": ServiceResourcesDictHelpers.model_config[ + "json_schema_extra" + ]["examples"][0] }, workbench=fake_workbench_without_outputs, ) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py index cc0246bfec9..c0a48e3da95 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py @@ -163,11 +163,17 @@ async def mock_retrieve_features( ) as 
respx_mock: if is_legacy: service_details = RunningDynamicServiceDetails.parse_obj( - RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + RunningDynamicServiceDetails.model_config["json_schema_extra"][ + "examples" + ][0] ) respx_mock.post( f"{service_details.legacy_service_url}/retrieve", name="retrieve" - ).respond(json=RetrieveDataOutEnveloped.Config.schema_extra["examples"][0]) + ).respond( + json=RetrieveDataOutEnveloped.model_config["json_schema_extra"][ + "examples" + ][0] + ) yield respx_mock # no cleanup required @@ -230,7 +236,9 @@ def mocked_director_v0_service_api( name="running interactive service", ).respond( json={ - "data": RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + "data": RunningDynamicServiceDetails.model_config["json_schema_extra"][ + "examples" + ][0] } ) @@ -247,7 +255,9 @@ def get_stack_status(node_uuid: NodeID) -> RunningDynamicServiceDetails: raise DynamicSidecarNotFoundError(node_uuid) return RunningDynamicServiceDetails.parse_obj( - RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + RunningDynamicServiceDetails.model_config["json_schema_extra"]["examples"][ + 0 + ] ) module_base = "simcore_service_director_v2.modules.dynamic_sidecar.scheduler" @@ -279,8 +289,12 @@ def remove_service(node_uuid: NodeID, *ars: Any, **kwargs: Any) -> None: [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_307_TEMPORARY_REDIRECT, is_legacy=True, ), @@ -288,8 +302,12 @@ def remove_service(node_uuid: NodeID, *ars: Any, **kwargs: Any) -> None: ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - 
service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_201_CREATED, is_legacy=False, ), @@ -297,8 +315,12 @@ def remove_service(node_uuid: NodeID, *ars: Any, **kwargs: Any) -> None: ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_201_CREATED, is_legacy=False, ), @@ -351,8 +373,12 @@ def test_create_dynamic_services( [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_307_TEMPORARY_REDIRECT, is_legacy=True, ), @@ -360,8 +386,12 @@ def test_create_dynamic_services( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -369,8 +399,12 @@ def test_create_dynamic_services( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + 
service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -409,8 +443,12 @@ def test_get_service_status( [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_307_TEMPORARY_REDIRECT, is_legacy=True, ), @@ -418,8 +456,12 @@ def test_get_service_status( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_204_NO_CONTENT, is_legacy=False, ), @@ -427,8 +469,12 @@ def test_get_service_status( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_204_NO_CONTENT, is_legacy=False, ), @@ -481,8 +527,12 @@ def dynamic_sidecar_scheduler(minimal_app: FastAPI) -> DynamicSidecarsScheduler: [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + 
service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_201_CREATED, is_legacy=False, ) @@ -528,8 +578,12 @@ def test_delete_service_waiting_for_manual_intervention( [ pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][0], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][0], exp_status_code=status.HTTP_200_OK, is_legacy=True, ), @@ -537,8 +591,12 @@ def test_delete_service_waiting_for_manual_intervention( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][1], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][1], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -546,8 +604,12 @@ def test_delete_service_waiting_for_manual_intervention( ), pytest.param( *ServiceParams( - service=DynamicServiceCreate.Config.schema_extra["example"], - service_labels=SimcoreServiceLabels.Config.schema_extra["examples"][2], + service=DynamicServiceCreate.model_config["json_schema_extra"][ + "example" + ], + service_labels=SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ][2], exp_status_code=status.HTTP_200_OK, is_legacy=False, ), @@ -571,7 +633,8 @@ def test_retrieve( response.status_code == exp_status_code ), f"expected status code {exp_status_code}, received {response.status_code}: {response.text}" assert ( - response.json() == RetrieveDataOutEnveloped.Config.schema_extra["examples"][0] + response.json() + == RetrieveDataOutEnveloped.model_config["json_schema_extra"]["examples"][0] ) diff --git 
a/services/director-v2/tests/unit/with_dbs/test_cli.py b/services/director-v2/tests/unit/with_dbs/test_cli.py index 43beec85900..da0bc605603 100644 --- a/services/director-v2/tests/unit/with_dbs/test_cli.py +++ b/services/director-v2/tests/unit/with_dbs/test_cli.py @@ -107,7 +107,9 @@ def mock_get_node_state(mocker: MockerFixture) -> None: mocker.patch( "simcore_service_director_v2.cli._core._get_dy_service_state", return_value=DynamicServiceGet.parse_obj( - RunningDynamicServiceDetails.Config.schema_extra["examples"][0] + RunningDynamicServiceDetails.model_config["json_schema_extra"]["examples"][ + 0 + ] ), ) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 0536261ed62..3be8d77c6f5 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -331,7 +331,9 @@ def service_name() -> str: @pytest.fixture( params=[ SimcoreServiceLabels.parse_obj(example) - for example in SimcoreServiceLabels.Config.schema_extra["examples"] + for example in SimcoreServiceLabels.model_config["json_schema_extra"][ + "examples" + ] ], ) def labels_example(request: pytest.FixtureRequest) -> SimcoreServiceLabels: diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index edca0dfe03c..66b2022d7c0 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -49,7 +49,9 @@ @pytest.fixture def mock_s3_settings() -> S3Settings: - return S3Settings.parse_obj(S3Settings.Config.schema_extra["examples"][0]) + return S3Settings.parse_obj( + 
S3Settings.model_config["json_schema_extra"]["examples"][0] + ) @pytest.fixture @@ -116,13 +118,15 @@ def swarm_network_id() -> str: def simcore_service_labels() -> SimcoreServiceLabels: # overwrites global fixture return SimcoreServiceLabels.parse_obj( - SimcoreServiceLabels.Config.schema_extra["examples"][2] + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2] ) @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.parse_obj(HardwareInfo.Config.schema_extra["examples"][0]) + return HardwareInfo.parse_obj( + HardwareInfo.model_config["json_schema_extra"]["examples"][0] + ) @pytest.fixture @@ -180,9 +184,9 @@ def expected_dynamic_sidecar_spec( "state_exclude": ["/tmp/strip_me/*", "*.py"], # noqa: S108 "state_paths": ["/tmp/save_1", "/tmp_save_2"], # noqa: S108 }, - "callbacks_mapping": CallbacksMapping.Config.schema_extra[ - "examples" - ][3], + "callbacks_mapping": CallbacksMapping.model_config[ + "json_schema_extra" + ]["examples"][3], "product_name": osparc_product_name, "project_id": "dd1d04d9-d704-4f7e-8f0f-1ca60cc771fe", "proxy_service_name": "dy-proxy_75c7f3f4-18f9-4678-8610-54a2ade78eaa", @@ -190,8 +194,12 @@ def expected_dynamic_sidecar_spec( "request_scheme": "http", "request_simcore_user_agent": request_simcore_user_agent, "restart_policy": "on-inputs-downloaded", - "wallet_info": WalletInfo.Config.schema_extra["examples"][0], - "pricing_info": PricingInfo.Config.schema_extra["examples"][0], + "wallet_info": WalletInfo.model_config["json_schema_extra"][ + "examples" + ][0], + "pricing_info": PricingInfo.model_config["json_schema_extra"][ + "examples" + ][0], "hardware_info": hardware_info, "service_name": "dy-sidecar_75c7f3f4-18f9-4678-8610-54a2ade78eaa", "service_port": 65534, diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py index 9a2a93d3a33..d8d95f93572 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py 
+++ b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py @@ -494,7 +494,7 @@ def _add_is_directory(entry: mock._Call) -> mock._Call: # noqa: SLF001 @pytest.mark.parametrize( - "req_example", NodeRequirements.Config.schema_extra["examples"] + "req_example", NodeRequirements.model_config["json_schema_extra"]["examples"] ) def test_node_requirements_correctly_convert_to_dask_resources( req_example: dict[str, Any] From 61128868f9290dfb5406c3c2cef69bdd45131b80 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 29 Oct 2024 10:38:59 +0100 Subject: [PATCH 004/121] continue fixing --- packages/models-library/src/models_library/errors.py | 4 +++- .../simcore_service_director_v2/models/comp_tasks.py | 11 ++++++----- .../test_models_schemas_dynamic_services_scheduler.py | 4 ++-- .../test_modules_dynamic_sidecar_client_api_thin.py | 4 ++-- 4 files changed, 13 insertions(+), 10 deletions(-) diff --git a/packages/models-library/src/models_library/errors.py b/packages/models-library/src/models_library/errors.py index d672008646c..e33ec782d8d 100644 --- a/packages/models-library/src/models_library/errors.py +++ b/packages/models-library/src/models_library/errors.py @@ -1,4 +1,6 @@ -from typing import Any, TypedDict +from typing import Any +from typing_extensions import TypedDict + Loc = tuple[int | str, ...] 
diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 844919cbde9..dca5279087f 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -20,8 +20,9 @@ ConfigDict, Field, PositiveInt, + TypeAdapter, + ValidationInfo, field_validator, - parse_obj_as, ) from simcore_postgres_database.models.comp_pipeline import StateType from simcore_postgres_database.models.comp_tasks import NodeClass @@ -57,7 +58,7 @@ class Image(BaseModel): @field_validator("node_requirements", mode="before") @classmethod - def migrate_from_requirements(cls, v, values): + def migrate_from_requirements(cls, v, info: ValidationInfo): if v is None: # NOTE: 'node_requirements' field's default=None although is NOT declared as nullable. # Then this validator with `pre=True, always=True` is used to create a default @@ -65,8 +66,8 @@ def migrate_from_requirements(cls, v, values): # This strategy guarantees backwards compatibility v = NodeRequirements( CPU=1.0, - GPU=1 if values.get("requires_gpu") else 0, - RAM=parse_obj_as(ByteSize, "128 MiB"), + GPU=1 if info.data.get("requires_gpu") else 0, + RAM=TypeAdapter(ByteSize).validate_python("128 MiB"), ) return v @@ -176,7 +177,7 @@ def backward_compatible_null_value(cls, v: HardwareInfo | None) -> HardwareInfo: return v def to_db_model(self, **exclusion_rules) -> dict[str, Any]: - comp_task_dict = self.dict(by_alias=True, exclude_unset=True, **exclusion_rules) + comp_task_dict = self.model_dump(by_alias=True, exclude_unset=True, **exclusion_rules) if "state" in comp_task_dict: comp_task_dict["state"] = RUNNING_STATE_TO_DB[comp_task_dict["state"]].value return comp_task_dict diff --git a/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py 
b/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py index 607b8231f78..16c249b3470 100644 --- a/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py +++ b/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py @@ -4,8 +4,8 @@ from copy import deepcopy from pathlib import Path +from pydantic import TypeAdapter import pytest -from pydantic import parse_file_as from simcore_service_director_v2.models.dynamic_services_scheduler import SchedulerData @@ -35,4 +35,4 @@ def test_ensure_legacy_format_compatibility(legacy_scheduler_data_format: Path): # PRs applying changes to the legacy format: # - https://github.com/ITISFoundation/osparc-simcore/pull/3610 - assert parse_file_as(list[SchedulerData], legacy_scheduler_data_format) + assert TypeAdapter(list[SchedulerData]).validate_json(legacy_scheduler_data_format.read_text()) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py index 6403be5a78e..cf4e0bc6e00 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -11,7 +11,7 @@ from httpx import Response from models_library.services_creation import CreateServiceMetricsAdditionalParams from models_library.sidecar_volumes import VolumeCategory, VolumeStatus -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from pytest_simcore.helpers.typing_env import EnvVarsDict from respx import MockRouter, Route from respx.types import SideEffectTypes @@ -63,7 +63,7 @@ async def thin_client(mocked_app: FastAPI) -> AsyncIterable[ThinSidecarsClient]: @pytest.fixture def dynamic_sidecar_endpoint() -> AnyHttpUrl: - return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") + return 
TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111") @pytest.fixture From c0cc0529a2d474486127699780891d5e2248e505 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 1 Nov 2024 11:24:42 +0100 Subject: [PATCH 005/121] fix validation --- .../src/models_library/api_schemas_directorv2/clusters.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py index 1c9892a7201..7889f68196b 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py @@ -7,6 +7,7 @@ Field, HttpUrl, NonNegativeFloat, + ValidationInfo, field_validator, model_validator, ) @@ -154,9 +155,9 @@ class ClusterCreate(BaseCluster): @field_validator("thumbnail", mode="before") @classmethod - def set_default_thumbnail_if_empty(cls, v, values): + def set_default_thumbnail_if_empty(cls, v, info: ValidationInfo): if v is None: - cluster_type = values["type"] + cluster_type = info.data["type"] default_thumbnails = { ClusterTypeInModel.AWS.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/9/93/Amazon_Web_Services_Logo.svg/250px-Amazon_Web_Services_Logo.svg.png", ClusterTypeInModel.ON_PREMISE.value: "https://upload.wikimedia.org/wikipedia/commons/thumb/a/ac/Crystal_Clear_app_network_local.png/120px-Crystal_Clear_app_network_local.png", From 4f32d2d35a0fd5d0c1bd75266daefbf3c0455aae Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 1 Nov 2024 11:35:28 +0100 Subject: [PATCH 006/121] pin pydantic-settings --- services/director-v2/requirements/_base.txt | 3 ++- services/director-v2/requirements/constraints.txt | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index 3922513db61..d6e2b383fcd 
100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -655,8 +655,9 @@ pydantic-extra-types==2.9.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # fastapi -pydantic-settings==2.6.0 +pydantic-settings==2.5.2 # via + # -c requirements/./constraints.txt # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in diff --git a/services/director-v2/requirements/constraints.txt b/services/director-v2/requirements/constraints.txt index e69de29bb2d..c02043ff33a 100644 --- a/services/director-v2/requirements/constraints.txt +++ b/services/director-v2/requirements/constraints.txt @@ -0,0 +1,2 @@ +# GCR remove me +pydantic-settings<2.6 \ No newline at end of file From 7fa57285236ee7870ea3d199c174851ef6cea80c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 5 Nov 2024 21:19:37 +0100 Subject: [PATCH 007/121] fix errors --- .../core/errors.py | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/errors.py b/services/director-v2/src/simcore_service_director_v2/core/errors.py index e8c47a934d3..dac383a10e3 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/core/errors.py @@ -19,10 +19,10 @@ } """ +from common_library.errors_classes import 
OsparcErrorMixin from models_library.errors import ErrorDict from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from pydantic.errors import PydanticErrorMixin class DirectorError(Exception): @@ -114,15 +114,15 @@ def __init__(self, pipeline_id: str): super().__init__(f"pipeline {pipeline_id} not found") -class ComputationalRunNotFoundError(PydanticErrorMixin, DirectorError): +class ComputationalRunNotFoundError(OsparcErrorMixin, DirectorError): msg_template = "Computational run not found" -class ComputationalTaskNotFoundError(PydanticErrorMixin, DirectorError): +class ComputationalTaskNotFoundError(OsparcErrorMixin, DirectorError): msg_template = "Computational task {node_id} not found" -class WalletNotEnoughCreditsError(PydanticErrorMixin, DirectorError): +class WalletNotEnoughCreditsError(OsparcErrorMixin, DirectorError): msg_template = "Wallet '{wallet_name}' has {wallet_credit_amount} credits." @@ -227,38 +227,38 @@ def get_errors(self) -> list[ErrorDict]: return value_errors -class ComputationalSchedulerChangedError(PydanticErrorMixin, SchedulerError): +class ComputationalSchedulerChangedError(OsparcErrorMixin, SchedulerError): code = "computational_backend.scheduler_changed" msg_template = "The dask scheduler ID changed from '{original_scheduler_id}' to '{current_scheduler_id}'" -class ComputationalBackendNotConnectedError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendNotConnectedError(OsparcErrorMixin, SchedulerError): code = "computational_backend.not_connected" msg_template = "The dask computational backend is not connected" -class ComputationalBackendNoS3AccessError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendNoS3AccessError(OsparcErrorMixin, SchedulerError): msg_template = "The S3 backend is not ready, please try again later" -class ComputationalBackendTaskNotFoundError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendTaskNotFoundError(OsparcErrorMixin, 
SchedulerError): code = "computational_backend.task_not_found" msg_template = ( "The dask computational backend does not know about the task '{job_id}'" ) -class ComputationalBackendTaskResultsNotReadyError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendTaskResultsNotReadyError(OsparcErrorMixin, SchedulerError): code = "computational_backend.task_result_not_ready" msg_template = "The task result is not ready yet for job '{job_id}'" -class ClustersKeeperNotAvailableError(PydanticErrorMixin, SchedulerError): +class ClustersKeeperNotAvailableError(OsparcErrorMixin, SchedulerError): code = "computational_backend.clusters_keeper_not_available" msg_template = "clusters-keeper service is not available!" -class ComputationalBackendOnDemandNotReadyError(PydanticErrorMixin, SchedulerError): +class ComputationalBackendOnDemandNotReadyError(OsparcErrorMixin, SchedulerError): code = "computational_backend.on_demand_cluster.not_ready" msg_template = ( "The on demand computational cluster is not ready 'est. 
remaining time: {eta}'" @@ -268,16 +268,16 @@ class ComputationalBackendOnDemandNotReadyError(PydanticErrorMixin, SchedulerErr # # SCHEDULER/CLUSTER ERRORS # -class ClusterNotFoundError(PydanticErrorMixin, SchedulerError): +class ClusterNotFoundError(OsparcErrorMixin, SchedulerError): code = "cluster.not_found" msg_template = "The cluster '{cluster_id}' not found" -class ClusterAccessForbiddenError(PydanticErrorMixin, SchedulerError): +class ClusterAccessForbiddenError(OsparcErrorMixin, SchedulerError): msg_template = "Insufficient rights to access cluster '{cluster_id}'" -class ClusterInvalidOperationError(PydanticErrorMixin, SchedulerError): +class ClusterInvalidOperationError(OsparcErrorMixin, SchedulerError): msg_template = "Invalid operation on cluster '{cluster_id}'" @@ -286,24 +286,24 @@ class ClusterInvalidOperationError(PydanticErrorMixin, SchedulerError): # -class DaskClientRequestError(PydanticErrorMixin, SchedulerError): +class DaskClientRequestError(OsparcErrorMixin, SchedulerError): code = "dask_client.request.error" msg_template = ( "The dask client to cluster on '{endpoint}' did an invalid request '{error}'" ) -class DaskClusterError(PydanticErrorMixin, SchedulerError): +class DaskClusterError(OsparcErrorMixin, SchedulerError): code = "cluster.error" msg_template = "The dask cluster on '{endpoint}' encountered an error: '{error}'" -class DaskGatewayServerError(PydanticErrorMixin, SchedulerError): +class DaskGatewayServerError(OsparcErrorMixin, SchedulerError): code = "gateway.error" msg_template = "The dask gateway on '{endpoint}' encountered an error: '{error}'" -class DaskClientAcquisisitonError(PydanticErrorMixin, SchedulerError): +class DaskClientAcquisisitonError(OsparcErrorMixin, SchedulerError): code = "dask_client.acquisition.error" msg_template = ( "The dask client to cluster '{cluster}' encountered an error '{error}'" From 7761829081b740709d519d4dd1285ef944d3f221 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 5 Nov 2024 21:27:05 
+0100 Subject: [PATCH 008/121] fix config --- .../models/dynamic_services_scheduler.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index 5711cec6eea..cbb2db5120a 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -139,9 +139,7 @@ def from_container(cls, container: dict[str, Any]) -> "DockerContainerInspect": id=container["Id"], ) - # TODO[pydantic]: The following keys were removed: `allow_mutation`. - # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information. - model_config = ConfigDict(ignored_types=(cached_property,), allow_mutation=False) + model_config = ConfigDict(ignored_types=(cached_property,), frozen=True) class ServiceRemovalState(BaseModel): From ce1082e28eab309fd08d825c7e3e961eedc14915 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 5 Nov 2024 22:05:14 +0100 Subject: [PATCH 009/121] fix deprecated --- .../api/routes/computations.py | 14 ++-- .../cli/_close_and_save_service.py | 4 +- .../simcore_service_director_v2/cli/_core.py | 4 +- .../modules/catalog.py | 6 +- .../modules/dask_client.py | 4 +- .../db/repositories/comp_tasks/_utils.py | 6 +- .../modules/db/repositories/users.py | 4 +- .../docker_service_specs/sidecar.py | 5 +- .../scheduler/_core/_events_user_services.py | 4 +- .../modules/instrumentation/_models.py | 4 +- .../modules/osparc_variables/_api_auth_rpc.py | 6 +- .../modules/projects_networks.py | 10 +-- .../modules/resource_usage_tracker_client.py | 7 +- .../utils/computations.py | 7 +- .../simcore_service_director_v2/utils/dask.py | 9 +-- .../utils/osparc_variables.py | 4 +- .../tests/integration/02/conftest.py | 9 +-- 
...t_dynamic_sidecar_nodeports_integration.py | 4 +- .../director-v2/tests/integration/02/utils.py | 4 +- services/director-v2/tests/unit/conftest.py | 14 ++-- .../tests/unit/test_models_clusters.py | 9 +-- .../tests/unit/test_modules_dask_client.py | 22 +++--- ...s_db_repositories_services_environments.py | 4 +- ...dules_dynamic_sidecar_client_api_public.py | 4 +- ...es_dynamic_sidecar_docker_compose_specs.py | 10 +-- .../tests/unit/test_modules_notifier.py | 4 +- .../unit/test_modules_osparc_variables.py | 8 +- .../unit/with_dbs/test_api_route_clusters.py | 73 +++++++++---------- .../with_dbs/test_api_route_computations.py | 55 +++++++------- .../test_api_route_computations_tasks.py | 4 +- ...t_modules_comp_scheduler_dask_scheduler.py | 4 +- .../tests/unit/with_dbs/test_utils_dask.py | 18 ++--- 32 files changed, 163 insertions(+), 181 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index 72bdf37e6c7..f9cf639bd48 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -35,7 +35,7 @@ from models_library.services import ServiceKeyVersion from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from servicelib.async_utils import run_sequentially_in_context from servicelib.logging_utils import log_decorator from servicelib.rabbitmq import RabbitMQRPCClient @@ -399,13 +399,11 @@ async def create_computation( # noqa: PLR0913 # pylint:disable=too-many-positi pipeline_details=await compute_pipeline_details( complete_dag, minimal_computational_dag, comp_tasks ), - url=parse_obj_as( - AnyHttpUrl, + url=TypeAdapter(AnyHttpUrl).validate_python( 
f"{request.url}/{computation.project_id}?user_id={computation.user_id}", ), stop_url=( - parse_obj_as( - AnyHttpUrl, + TypeAdapter(AnyHttpUrl).validate_python( f"{request.url}/{computation.project_id}:stop?user_id={computation.user_id}", ) if computation.start_pipeline @@ -510,9 +508,9 @@ async def get_computation( id=project_id, state=pipeline_state, pipeline_details=pipeline_details, - url=parse_obj_as(AnyHttpUrl, f"{request.url}"), + url=TypeAdapter(AnyHttpUrl).validate_python(f"{request.url}"), stop_url=( - parse_obj_as(AnyHttpUrl, f"{self_url}:stop?user_id={user_id}") + TypeAdapter(AnyHttpUrl).validate_python(f"{self_url}:stop?user_id={user_id}") if pipeline_state.is_running() else None ), @@ -588,7 +586,7 @@ async def stop_computation( pipeline_details=await compute_pipeline_details( complete_dag, pipeline_dag, tasks ), - url=parse_obj_as(AnyHttpUrl, f"{request.url}"), + url=TypeAdapter(AnyHttpUrl).validate_python(f"{request.url}"), stop_url=None, iteration=last_run.iteration if last_run else None, cluster_id=last_run.cluster_id if last_run else None, diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py b/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py index be75694f55c..2f04477f06a 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py @@ -5,7 +5,7 @@ import rich from fastapi import FastAPI from models_library.projects_nodes_io import NodeID -from pydantic import AnyHttpUrl, PositiveFloat, parse_obj_as +from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter from rich.progress import ( BarColumn, Progress, @@ -106,7 +106,7 @@ async def async_close_and_save_service( client = Client( app=app, async_client=thin_dv2_localhost_client.client, - base_url=parse_obj_as(AnyHttpUrl, thin_dv2_localhost_client.BASE_ADDRESS), + 
base_url=TypeAdapter(AnyHttpUrl).validate_python(thin_dv2_localhost_client.BASE_ADDRESS), ) if not skip_container_removal: diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py index 893aed2504e..bcee8a446e4 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py @@ -12,7 +12,7 @@ from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceType from models_library.services_enums import ServiceBootType, ServiceState -from pydantic import AnyHttpUrl, BaseModel, PositiveInt, parse_obj_as +from pydantic import AnyHttpUrl, BaseModel, PositiveInt, TypeAdapter from rich.live import Live from rich.table import Table from servicelib.services_utils import get_service_from_key @@ -58,7 +58,7 @@ def _get_dynamic_sidecar_endpoint( dynamic_sidecar_names = DynamicSidecarNamesHelper.make(NodeID(node_id)) hostname = dynamic_sidecar_names.service_name_dynamic_sidecar port = settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_PORT - url: AnyHttpUrl = parse_obj_as(AnyHttpUrl, f"http://{hostname}:{port}") # NOSONAR + url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python(f"http://{hostname}:{port}") # NOSONAR return url diff --git a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py index f5e378afa43..2a064d14642 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py @@ -8,7 +8,7 @@ from models_library.services import ServiceKey, ServiceVersion from models_library.services_resources import ServiceResourcesDict from models_library.users import UserID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from settings_library.catalog 
import CatalogSettings from ..utils.client_decorators import handle_errors, handle_retry @@ -90,8 +90,8 @@ async def get_service_resources( ) resp.raise_for_status() if resp.status_code == status.HTTP_200_OK: - json_response: ServiceResourcesDict = parse_obj_as( - ServiceResourcesDict, resp.json() + json_response: ServiceResourcesDict = TypeAdapter(ServiceResourcesDict).validate_python( + resp.json() ) return json_response raise HTTPException(status_code=resp.status_code, detail=resp.content) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py index fcba4ad1fd3..e28e48f82f7 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py @@ -48,7 +48,7 @@ from models_library.projects_nodes_io import NodeID from models_library.resource_tracker import HardwareInfo from models_library.users import UserID -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from pydantic.networks import AnyUrl from servicelib.logging_utils import log_catch from settings_library.s3 import S3Settings @@ -583,5 +583,5 @@ def _get_worker_used_resources( assert dashboard_link # nosec return ClusterDetails( scheduler=Scheduler(status=scheduler_status, **scheduler_info), - dashboard_link=parse_obj_as(AnyUrl, dashboard_link), + dashboard_link=TypeAdapter(AnyUrl).validate_python(dashboard_link), ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index a33f689e9da..bc43a6badbf 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ 
b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -36,7 +36,7 @@ ) from models_library.users import UserID from models_library.wallets import ZERO_CREDITS, WalletInfo -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.rabbitmq import ( RabbitMQRPCClient, RemoteMethodNotRegisteredError, @@ -174,7 +174,7 @@ async def _generate_task_image( } project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) project_node = await project_nodes_repo.get(connection, node_id=node_id) - node_resources = parse_obj_as(ServiceResourcesDict, project_node.required_resources) + node_resources = TypeAdapter(ServiceResourcesDict).validate_python(project_node.required_resources) if not node_resources: node_resources = await catalog_client.get_service_resources( user_id, node.key, node.version @@ -287,7 +287,7 @@ def _by_type_name(ec2: EC2InstanceTypeGet) -> bool: # less memory than the machine theoretical amount project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) node = await project_nodes_repo.get(connection, node_id=node_id) - node_resources = parse_obj_as(ServiceResourcesDict, node.required_resources) + node_resources = TypeAdapter(ServiceResourcesDict).validate_python(node.required_resources) if DEFAULT_SINGLE_SERVICE_NAME in node_resources: image_resources: ImageResources = node_resources[ DEFAULT_SINGLE_SERVICE_NAME diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py index 94f17b90295..434e523965c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py @@ -1,5 +1,5 @@ from models_library.users import UserID -from pydantic import EmailStr, parse_obj_as +from pydantic import EmailStr, TypeAdapter from 
simcore_postgres_database.models.users import UserRole from simcore_postgres_database.utils_users import UsersRepo @@ -10,7 +10,7 @@ class UsersRepository(BaseRepository): async def get_user_email(self, user_id: UserID) -> EmailStr: async with self.db_engine.acquire() as conn: email = await UsersRepo.get_email(conn, user_id) - return parse_obj_as(EmailStr, email) + return TypeAdapter(EmailStr).validate_python(email) async def get_user_role(self, user_id: UserID) -> UserRole: async with self.db_engine.acquire() as conn: diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index 001e549bf57..7e7428b195a 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -15,7 +15,7 @@ ) from models_library.resource_tracker import HardwareInfo from models_library.service_settings_labels import SimcoreServiceSettingsLabel -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.efs_guardian import efs_manager from servicelib.utils import unused_port @@ -471,8 +471,7 @@ async def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: if hardware_info and len(hardware_info.aws_ec2_instances) == 1: ec2_instance_type: str = hardware_info.aws_ec2_instances[0] placement_constraints.append( - parse_obj_as( - DockerPlacementConstraint, + TypeAdapter(DockerPlacementConstraint).validate_python( f"node.labels.{DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY}=={ec2_instance_type}", ) ) diff --git 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py index f708c1cb22c..f8416b4809b 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py @@ -7,7 +7,7 @@ from models_library.service_settings_labels import SimcoreServiceLabels from models_library.services import ServiceKeyVersion, ServiceVersion from models_library.services_creation import CreateServiceMetricsAdditionalParams -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.fastapi.long_running_tasks.client import TaskId from tenacity import RetryError from tenacity.asyncio import AsyncRetrying @@ -168,7 +168,7 @@ async def progress_create_containers( project_name=project_name, node_name=node_name, service_key=scheduler_data.key, - service_version=parse_obj_as(ServiceVersion, scheduler_data.version), + service_version=TypeAdapter(ServiceVersion).validate_python(scheduler_data.version), service_resources=scheduler_data.service_resources, service_additional_metadata={}, ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py b/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py index 7407885af31..5459c67446b 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/instrumentation/_models.py @@ -2,7 +2,7 @@ from typing import Final from prometheus_client import CollectorRegistry, Histogram -from pydantic import ByteSize, parse_obj_as +from pydantic import ByteSize, TypeAdapter from servicelib.instrumentation 
import MetricsBase, get_metrics_namespace from ..._meta import PROJECT_NAME @@ -31,7 +31,7 @@ _RATE_BPS_BUCKETS: Final[tuple[float, ...]] = tuple( - parse_obj_as(ByteSize, f"{m}MiB") + TypeAdapter(ByteSize).validate_python(f"{m}MiB") for m in ( 1, 30, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py index d86f09ec9c2..c9edc8c0f1c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py @@ -6,7 +6,7 @@ from models_library.products import ProductName from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from ..rabbitmq import get_rabbitmq_rpc_client @@ -26,10 +26,10 @@ async def get_or_create_api_key_and_secret( rpc_client = get_rabbitmq_rpc_client(app) result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, - parse_obj_as(RPCMethodName, "get_or_create_api_keys"), + TypeAdapter(RPCMethodName).validate_python("get_or_create_api_keys"), product_name=product_name, user_id=user_id, name=name, expiration=expiration, ) - return ApiKeyGet.parse_obj(result) + return ApiKeyGet.model_validate(result) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py b/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py index cba005a92ae..ed64923d3c6 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py @@ -17,7 +17,7 @@ from models_library.service_settings_labels import SimcoreServiceLabels from models_library.services import ServiceKeyVersion from models_library.users 
import UserID -from pydantic import ValidationError, parse_obj_as +from pydantic import TypeAdapter, ValidationError from servicelib.rabbitmq import RabbitMQClient from servicelib.utils import logged_gather @@ -45,7 +45,7 @@ class _ToAdd(NamedTuple): def _network_name(project_id: ProjectID, user_defined: str) -> DockerNetworkName: network_name = f"{PROJECT_NETWORK_PREFIX}_{project_id}_{user_defined}" - return parse_obj_as(DockerNetworkName, network_name) + return TypeAdapter(DockerNetworkName).validate_python(network_name) async def requires_dynamic_sidecar( @@ -184,10 +184,10 @@ async def _get_networks_with_aliases_for_default_network( be on the same network. Return an updated version of the projects_networks """ - new_networks_with_aliases: NetworksWithAliases = NetworksWithAliases.parse_obj({}) + new_networks_with_aliases: NetworksWithAliases = NetworksWithAliases.model_validate({}) default_network = _network_name(project_id, "default") - new_networks_with_aliases[default_network] = ContainerAliases.parse_obj({}) + new_networks_with_aliases[default_network] = ContainerAliases.model_validate({}) for node_uuid, node_content in new_workbench.items(): # only add dynamic-sidecar nodes @@ -200,7 +200,7 @@ async def _get_networks_with_aliases_for_default_network( # only add if network label is valid, otherwise it will be skipped try: - network_alias = parse_obj_as(DockerNetworkAlias, node_content.label) + network_alias = TypeAdapter(DockerNetworkAlias).validate_python(node_content.label) except ValidationError: message = LoggerRabbitMessage( user_id=user_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py index 2c546ea3d84..f1b4280bdcc 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py +++ 
b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py @@ -23,7 +23,6 @@ ) from models_library.services import ServiceKey, ServiceVersion from models_library.wallets import WalletID -from pydantic import parse_obj_as from ..core.errors import PricingPlanUnitNotFoundError from ..core.settings import AppSettings @@ -92,7 +91,7 @@ async def get_default_service_pricing_plan( raise PricingPlanUnitNotFoundError(msg) response.raise_for_status() - return parse_obj_as(PricingPlanGet, response.json()) + return PricingPlanGet.model_validate(response.json()) async def get_default_pricing_and_hardware_info( self, @@ -130,7 +129,7 @@ async def get_pricing_unit( }, ) response.raise_for_status() - return parse_obj_as(PricingUnitGet, response.json()) + return PricingUnitGet.model_validate(response.json()) async def get_wallet_credits( self, @@ -142,7 +141,7 @@ async def get_wallet_credits( params={"product_name": product_name, "wallet_id": wallet_id}, ) response.raise_for_status() - return parse_obj_as(WalletTotalCredits, response.json()) + return WalletTotalCredits.model_validate(response.json()) # # app diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index 0abbc18f593..56fb24170ef 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -6,7 +6,6 @@ from models_library.services import ServiceKeyVersion from models_library.services_regex import SERVICE_KEY_RE from models_library.users import UserID -from pydantic import parse_obj_as from servicelib.utils import logged_gather from ..models.comp_tasks import CompTaskAtDB @@ -123,7 +122,7 @@ async def find_deprecated_tasks( ) ) service_key_version_to_details = { - ServiceKeyVersion.construct( + ServiceKeyVersion.model_construct( key=details["key"], 
version=details["version"] ): details for details in services_details @@ -132,9 +131,7 @@ async def find_deprecated_tasks( def _is_service_deprecated(service: dict[str, Any]) -> bool: if deprecation_date := service.get("deprecated"): - deprecation_date = parse_obj_as( - datetime.datetime, deprecation_date - ).replace(tzinfo=datetime.timezone.utc) + deprecation_date = datetime.datetime.fromisoformat(deprecation_date).replace(tzinfo=datetime.UTC) is_deprecated: bool = today > deprecation_date return is_deprecated return False diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index 452f1ba50a9..422eb52b210 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -30,7 +30,7 @@ from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID -from pydantic import AnyUrl, ByteSize, ValidationError, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter, ValidationError from servicelib.logging_utils import log_catch, log_context from simcore_sdk import node_ports_v2 from simcore_sdk.node_ports_common.exceptions import ( @@ -326,8 +326,7 @@ def compute_task_labels( memory_limit=node_requirements.ram, cpu_limit=node_requirements.cpu, ).to_simcore_runtime_docker_labels() - return standard_simcore_labels | parse_obj_as( - ContainerLabelsDict, + return standard_simcore_labels | TypeAdapter(ContainerLabelsDict).validate_python( { DockerLabelKey.from_key(k): f"{v}" for k, v in run_metadata.items() @@ -552,8 +551,8 @@ def _to_human_readable_resource_values(resources: dict[str, Any]) -> dict[str, A for res_name, res_value in resources.items(): if "RAM" in res_name: try: - human_readable_resources[res_name] = parse_obj_as( - ByteSize, res_value + 
human_readable_resources[res_name] = TypeAdapter(ByteSize).validate_python( + res_value ).human_readable() except ValidationError: _logger.warning( diff --git a/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py b/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py index 0dfef24cfef..5a3321510e6 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py @@ -4,7 +4,7 @@ from typing import Any, Final, NamedTuple, TypeAlias from models_library.utils.specs_substitution import SubstitutionValue -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt, TypeAdapter from servicelib.utils import logged_gather ContextDict: TypeAlias = dict[str, Any] @@ -89,7 +89,7 @@ def copy( return {k: self._variables_getters[k] for k in selection} -_HANDLERS_TIMEOUT: Final[NonNegativeInt] = parse_obj_as(NonNegativeInt, 4) +_HANDLERS_TIMEOUT: Final[NonNegativeInt] = TypeAdapter(NonNegativeInt).validate_python(4) async def resolve_variables_from_context( diff --git a/services/director-v2/tests/integration/02/conftest.py b/services/director-v2/tests/integration/02/conftest.py index 6bde0a5d6f5..1cc8e4fd64e 100644 --- a/services/director-v2/tests/integration/02/conftest.py +++ b/services/director-v2/tests/integration/02/conftest.py @@ -5,6 +5,7 @@ from uuid import uuid4 import aiodocker +from pydantic import TypeAdapter import pytest from models_library.api_schemas_resource_usage_tracker.pricing_plans import ( PricingPlanGet, @@ -14,7 +15,6 @@ ServiceResourcesDict, ServiceResourcesDictHelpers, ) -from pydantic import parse_obj_as from pytest_mock.plugin import MockerFixture @@ -64,7 +64,7 @@ def mock_projects_networks_repository(mocker: MockerFixture) -> None: "simcore_service_director_v2.modules.db.repositories." 
"projects_networks.ProjectsNetworksRepository.get_projects_networks" ), - return_value=ProjectsNetworks.parse_obj( + return_value=ProjectsNetworks.model_validate( {"project_uuid": uuid4(), "networks_with_aliases": {}} ), ) @@ -72,8 +72,7 @@ def mock_projects_networks_repository(mocker: MockerFixture) -> None: @pytest.fixture def service_resources() -> ServiceResourcesDict: - return parse_obj_as( - ServiceResourcesDict, + return TypeAdapter(ServiceResourcesDict).validate_python( ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], ) @@ -81,7 +80,7 @@ def service_resources() -> ServiceResourcesDict: @pytest.fixture def mock_resource_usage_tracker(mocker: MockerFixture) -> None: base_module = "simcore_service_director_v2.modules.resource_usage_tracker_client" - service_pricing_plan = PricingPlanGet.parse_obj( + service_pricing_plan = PricingPlanGet.model_validate( PricingPlanGet.model_config["json_schema_extra"]["examples"][1] ) for unit in service_pricing_plan.pricing_units: diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index 17d3fe4bcca..b3825340fd1 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -47,7 +47,7 @@ from models_library.projects_pipeline import PipelineDetails from models_library.projects_state import RunningState from models_library.users import UserID -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.host import get_localhost_ip from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -840,7 +840,7 @@ async def _debug_progress_callback( Client( app=initialized_app, async_client=director_v2_client, - 
base_url=parse_obj_as(AnyHttpUrl, f"{director_v2_client.base_url}"), + base_url=TypeAdapter(AnyHttpUrl).validate_python(f"{director_v2_client.base_url}"), ), task_id, task_timeout=60, diff --git a/services/director-v2/tests/integration/02/utils.py b/services/director-v2/tests/integration/02/utils.py index 0c5f10c07bd..db99170d9ed 100644 --- a/services/director-v2/tests/integration/02/utils.py +++ b/services/director-v2/tests/integration/02/utils.py @@ -20,7 +20,7 @@ ServiceResourcesDictHelpers, ) from models_library.users import UserID -from pydantic import PositiveInt, parse_obj_as +from pydantic import PositiveInt, TypeAdapter from pytest_simcore.helpers.host import get_localhost_ip from servicelib.common_headers import ( X_DYNAMIC_SIDECAR_REQUEST_DNS, @@ -303,7 +303,7 @@ async def _get_service_resources( url = f"{catalog_url}/v0/services/{encoded_key}/{service_version}/resources" async with httpx.AsyncClient() as client: response = await client.get(f"{url}") - return parse_obj_as(ServiceResourcesDict, response.json()) + return TypeAdapter(ServiceResourcesDict).validate_python(response.json()) async def _handle_redirection( diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index 2e53bf1fab8..76ecd742510 100644 --- a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -10,6 +10,7 @@ from unittest import mock import aiodocker +from pydantic import TypeAdapter import pytest import respx from faker import Faker @@ -27,7 +28,6 @@ from models_library.service_settings_labels import SimcoreServiceLabels from models_library.services import RunID, ServiceKey, ServiceKeyVersion, ServiceVersion from models_library.services_enums import ServiceState -from pydantic import parse_obj_as from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.s3 import S3Settings @@ -52,16 +52,16 @@ def 
simcore_services_network_name() -> str: @pytest.fixture def simcore_service_labels() -> SimcoreServiceLabels: - simcore_service_labels = SimcoreServiceLabels.parse_obj( + simcore_service_labels = SimcoreServiceLabels.model_validate( SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][1] ) - simcore_service_labels.callbacks_mapping = parse_obj_as(CallbacksMapping, {}) + simcore_service_labels.callbacks_mapping = CallbacksMapping.model_validate({}) return simcore_service_labels @pytest.fixture def dynamic_service_create() -> DynamicServiceCreate: - return DynamicServiceCreate.parse_obj( + return DynamicServiceCreate.model_validate( DynamicServiceCreate.model_config["json_schema_extra"]["example"] ) @@ -211,8 +211,8 @@ def mocked_storage_service_api( @pytest.fixture def mock_service_key_version() -> ServiceKeyVersion: return ServiceKeyVersion( - key=parse_obj_as(ServiceKey, "simcore/services/dynamic/myservice"), - version=parse_obj_as(ServiceVersion, "1.4.5"), + key=TypeAdapter(ServiceKey).validate_python("simcore/services/dynamic/myservice"), + version=TypeAdapter(ServiceVersion).validate_python("1.4.5"), ) @@ -221,7 +221,7 @@ def fake_service_specifications(faker: Faker) -> dict[str, Any]: # the service specifications follow the Docker service creation available # https://docs.docker.com/engine/api/v1.41/#operation/ServiceCreate return { - "sidecar": DockerServiceSpec.parse_obj( + "sidecar": DockerServiceSpec.model_validate( { "Labels": {"label_one": faker.pystr(), "label_two": faker.pystr()}, "TaskTemplate": { diff --git a/services/director-v2/tests/unit/test_models_clusters.py b/services/director-v2/tests/unit/test_models_clusters.py index 0a5d29283bb..ac65d24609a 100644 --- a/services/director-v2/tests/unit/test_models_clusters.py +++ b/services/director-v2/tests/unit/test_models_clusters.py @@ -13,7 +13,7 @@ WorkerMetrics, ) from models_library.clusters import ClusterTypeInModel -from pydantic import BaseModel, parse_obj_as +from pydantic import 
BaseModel, TypeAdapter from simcore_postgres_database.models.clusters import ClusterType @@ -63,11 +63,10 @@ def test_worker_constructor_corrects_negative_used_resources(faker: Faker): worker = Worker( id=faker.pyint(min_value=1), name=faker.name(), - resources=parse_obj_as(AvailableResources, {}), - used_resources=parse_obj_as(UsedResources, {"CPU": -0.0000234}), + resources=TypeAdapter(AvailableResources).validate_python({}), + used_resources=TypeAdapter(UsedResources).validate_python({"CPU": -0.0000234}), memory_limit=faker.pyint(min_value=1), - metrics=parse_obj_as( - WorkerMetrics, + metrics=WorkerMetrics.model_validate( { "cpu": faker.pyfloat(min_value=0), "memory": faker.pyint(min_value=0), diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index 15720935aae..bc053ed20bf 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -54,8 +54,7 @@ from models_library.projects_nodes_io import NodeID from models_library.resource_tracker import HardwareInfo from models_library.users import UserID -from pydantic import AnyUrl, ByteSize, SecretStr -from pydantic.tools import parse_obj_as +from pydantic import AnyUrl, ByteSize, SecretStr, TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.background_task import periodic_task @@ -164,7 +163,7 @@ async def factory() -> DaskClient: client = await DaskClient.create( app=minimal_app, settings=minimal_app.state.settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND, - endpoint=parse_obj_as(AnyUrl, dask_spec_local_cluster.scheduler_address), + endpoint=TypeAdapter(AnyUrl).validate_python(dask_spec_local_cluster.scheduler_address), authentication=NoAuthentication(), tasks_file_link_type=tasks_file_link_type, cluster_type=ClusterTypeInModel.ON_PREMISE, @@ -205,7 +204,7 @@ async def factory() -> 
DaskClient: client = await DaskClient.create( app=minimal_app, settings=minimal_app.state.settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND, - endpoint=parse_obj_as(AnyUrl, local_dask_gateway_server.address), + endpoint=TypeAdapter(AnyUrl).validate_python(local_dask_gateway_server.address), authentication=SimpleAuthentication( username="pytest_user", password=SecretStr(local_dask_gateway_server.password), @@ -299,7 +298,7 @@ def cpu_image(node_id: NodeID) -> ImageParams: tag="1.5.5", node_requirements=NodeRequirements( CPU=1, - RAM=parse_obj_as(ByteSize, "128 MiB"), + RAM=TypeAdapter(ByteSize).validate_python("128 MiB"), GPU=None, ), ) # type: ignore @@ -327,7 +326,7 @@ def gpu_image(node_id: NodeID) -> ImageParams: node_requirements=NodeRequirements( CPU=1, GPU=1, - RAM=parse_obj_as(ByteSize, "256 MiB"), + RAM=TypeAdapter(ByteSize).validate_python("256 MiB"), ), ) # type: ignore return ImageParams( @@ -367,15 +366,15 @@ def _mocked_node_ports(mocker: MockerFixture) -> None: mocker.patch( "simcore_service_director_v2.modules.dask_client.dask_utils.compute_input_data", - return_value=TaskInputData.parse_obj({}), + return_value=TaskInputData.model_validate({}), ) mocker.patch( "simcore_service_director_v2.modules.dask_client.dask_utils.compute_output_data_schema", - return_value=TaskOutputDataSchema.parse_obj({}), + return_value=TaskOutputDataSchema.model_validate({}), ) mocker.patch( "simcore_service_director_v2.modules.dask_client.dask_utils.compute_service_log_file_upload_link", - return_value=parse_obj_as(AnyUrl, "file://undefined"), + return_value=TypeAdapter(AnyUrl).validate_python("file://undefined"), ) @@ -470,8 +469,7 @@ def comp_run_metadata(faker: Faker) -> RunMetadataDict: @pytest.fixture def task_labels(comp_run_metadata: RunMetadataDict) -> ContainerLabelsDict: - return parse_obj_as( - ContainerLabelsDict, + return TypeAdapter(ContainerLabelsDict).validate_python( { k.replace("_", "-").lower(): v for k, v in comp_run_metadata.items() @@ -949,7 +947,7 @@ async 
def test_too_many_resources_send_computation_task( tag="1.4.5", node_requirements=NodeRequirements( CPU=10000000000000000, - RAM=parse_obj_as(ByteSize, "128 MiB"), + RAM=TypeAdapter(ByteSize).validate_python("128 MiB"), GPU=None, ), ) # type: ignore diff --git a/services/director-v2/tests/unit/test_modules_db_repositories_services_environments.py b/services/director-v2/tests/unit/test_modules_db_repositories_services_environments.py index 82e397bd3f1..32c39f416ee 100644 --- a/services/director-v2/tests/unit/test_modules_db_repositories_services_environments.py +++ b/services/director-v2/tests/unit/test_modules_db_repositories_services_environments.py @@ -1,5 +1,5 @@ from models_library.osparc_variable_identifier import OsparcVariableIdentifier -from pydantic import parse_obj_as +from pydantic import TypeAdapter from simcore_postgres_database.models.services_environments import VENDOR_SECRET_PREFIX @@ -8,4 +8,4 @@ def test_vendor_secret_names_are_osparc_environments(): # packages simcore_postgres_database and models_library which are indenpendent assert VENDOR_SECRET_PREFIX.endswith("_") - parse_obj_as(OsparcVariableIdentifier, f"${VENDOR_SECRET_PREFIX}FAKE_SECRET") + TypeAdapter(OsparcVariableIdentifier).validate_python(f"${VENDOR_SECRET_PREFIX}FAKE_SECRET") diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py index a38a5a06197..f003acf6ad0 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py @@ -11,7 +11,7 @@ from fastapi import FastAPI, status from httpx import HTTPError, Response from models_library.sidecar_volumes import VolumeCategory, VolumeStatus -from pydantic import AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter from pytest_mock import MockerFixture from 
pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.fastapi.http_client_thin import ClientHttpError, UnexpectedStatusError @@ -33,7 +33,7 @@ @pytest.fixture def dynamic_sidecar_endpoint() -> AnyHttpUrl: - return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") + return TypeAdapter(AnyHttpUrl).validate_python("http://missing-host:1111") @pytest.fixture diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py index d06444cf155..340c7ad3e44 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py @@ -6,6 +6,7 @@ from typing import Any from uuid import uuid4 +from pydantic import TypeAdapter import pytest import yaml from models_library.docker import to_simcore_runtime_docker_label_key @@ -21,7 +22,6 @@ ServiceResourcesDict, ) from models_library.users import UserID -from pydantic import parse_obj_as from servicelib.resources import CPU_RESOURCE_LIMIT_KEY, MEM_RESOURCE_LIMIT_KEY from simcore_service_director_v2.modules.dynamic_sidecar import docker_compose_specs @@ -74,8 +74,7 @@ def test_parse_and_export_of_compose_environment_section(): [ pytest.param( {"version": "2.3", "services": {DEFAULT_SINGLE_SERVICE_NAME: {}}}, - parse_obj_as( - ServiceResourcesDict, + TypeAdapter(ServiceResourcesDict).validate_python( { DEFAULT_SINGLE_SERVICE_NAME: { "image": "simcore/services/dynamic/jupyter-math:2.0.5", @@ -90,8 +89,7 @@ def test_parse_and_export_of_compose_environment_section(): ), pytest.param( {"version": "3.7", "services": {DEFAULT_SINGLE_SERVICE_NAME: {}}}, - parse_obj_as( - ServiceResourcesDict, + TypeAdapter(ServiceResourcesDict).validate_python( { DEFAULT_SINGLE_SERVICE_NAME: { "image": "simcore/services/dynamic/jupyter-math:2.0.5", @@ -200,7 +198,7 @@ def 
test_regression_service_has_no_reservations(): "version": "3.7", "services": {DEFAULT_SINGLE_SERVICE_NAME: {}}, } - service_resources: ServiceResourcesDict = parse_obj_as(ServiceResourcesDict, {}) + service_resources: ServiceResourcesDict = TypeAdapter(ServiceResourcesDict).validate_python({}) spec_before = deepcopy(service_spec) docker_compose_specs._update_resource_limits_and_reservations( diff --git a/services/director-v2/tests/unit/test_modules_notifier.py b/services/director-v2/tests/unit/test_modules_notifier.py index 46d0879cebc..cf6d8e1b01c 100644 --- a/services/director-v2/tests/unit/test_modules_notifier.py +++ b/services/director-v2/tests/unit/test_modules_notifier.py @@ -18,7 +18,7 @@ from models_library.projects_nodes_io import NodeID from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import NonNegativeInt, parse_obj_as +from pydantic import NonNegativeInt from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.utils import logged_gather @@ -111,7 +111,7 @@ def _get_on_no_more_credits_event( # emulates front-end receiving message async def on_no_more_credits(data): - assert parse_obj_as(ServiceNoMoreCredits, data) is not None + assert ServiceNoMoreCredits.model_validate(data) is not None on_event_spy = AsyncMock(wraps=on_no_more_credits) socketio_client.on(SOCKET_IO_SERVICE_NO_MORE_CREDITS_EVENT, on_event_spy) diff --git a/services/director-v2/tests/unit/test_modules_osparc_variables.py b/services/director-v2/tests/unit/test_modules_osparc_variables.py index 9ed659f00ad..635904292b8 100644 --- a/services/director-v2/tests/unit/test_modules_osparc_variables.py +++ b/services/director-v2/tests/unit/test_modules_osparc_variables.py @@ -21,7 +21,7 @@ from models_library.users import UserID from models_library.utils.specs_substitution import SubstitutionValue from models_library.utils.string_substitution import 
OSPARC_IDENTIFIER_PREFIX -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.faker_compose_specs import generate_fake_docker_compose from simcore_postgres_database.models.services_environments import VENDOR_SECRET_PREFIX @@ -48,8 +48,8 @@ def session_context(faker: Faker) -> ContextDict: return ContextDict( app=FastAPI(), - service_key=parse_obj_as(ServiceKey, "simcore/services/dynamic/foo"), - service_version=parse_obj_as(ServiceVersion, "1.2.3"), + service_key=TypeAdapter(ServiceKey).validate_python("simcore/services/dynamic/foo"), + service_version=TypeAdapter(ServiceVersion).validate_python("1.2.3"), compose_spec=generate_fake_docker_compose(faker), product_name=faker.word(), project_id=faker.uuid4(), @@ -101,7 +101,7 @@ async def request_user_email(app: FastAPI, user_id: UserID) -> SubstitutionValue # All values extracted from the context MUST be SubstitutionValue assert { - key: parse_obj_as(SubstitutionValue, value) for key, value in environs.items() + key: TypeAdapter(SubstitutionValue).validate_python(value) for key, value in environs.items() } for osparc_variable_name, context_name in [ diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py index 63457484613..65a080b90e7 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py @@ -30,7 +30,7 @@ ClusterAuthentication, SimpleAuthentication, ) -from pydantic import AnyHttpUrl, SecretStr, parse_obj_as +from pydantic import AnyHttpUrl, SecretStr, TypeAdapter from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.utils_encoders import create_json_encoder_wo_secrets from simcore_postgres_database.models.clusters import ClusterType, clusters @@ -94,7 +94,7 @@ async def test_list_clusters( # there is no cluster at the 
moment, the list shall contain the default cluster response = await async_client.get(list_clusters_url) assert response.status_code == status.HTTP_200_OK - returned_clusters_list = parse_obj_as(list[ClusterGet], response.json()) + returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python(response.json()) assert ( len(returned_clusters_list) == 1 ), f"no default cluster in {returned_clusters_list=}" @@ -109,7 +109,7 @@ async def test_list_clusters( response = await async_client.get(list_clusters_url) assert response.status_code == status.HTTP_200_OK - returned_clusters_list = parse_obj_as(list[ClusterGet], response.json()) + returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python(response.json()) assert ( len(returned_clusters_list) == NUM_CLUSTERS + 1 ) # the default cluster comes on top of the NUM_CLUSTERS @@ -121,7 +121,7 @@ async def test_list_clusters( user_2 = registered_user() response = await async_client.get(f"/v2/clusters?user_id={user_2['id']}") assert response.status_code == status.HTTP_200_OK - returned_clusters_list = parse_obj_as(list[ClusterGet], response.json()) + returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python(response.json()) assert ( len(returned_clusters_list) == 1 ), f"no default cluster in {returned_clusters_list=}" @@ -147,7 +147,7 @@ async def test_list_clusters( response = await async_client.get(f"/v2/clusters?user_id={user_2['id']}") assert response.status_code == status.HTTP_200_OK - user_2_clusters = parse_obj_as(list[ClusterGet], response.json()) + user_2_clusters = TypeAdapter(list[ClusterGet]).validate_python(response.json()) # we should find 3 clusters + the default cluster assert len(user_2_clusters) == 3 + 1 for name in [ @@ -187,7 +187,7 @@ async def test_get_cluster( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = parse_obj_as(ClusterGet, response.json()) + 
returned_cluster = ClusterGet.model_validate(response.json()) assert returned_cluster assert the_cluster.dict(exclude={"authentication"}) == returned_cluster.dict( exclude={"authentication"} @@ -283,7 +283,7 @@ async def test_get_default_cluster( get_cluster_url = URL(f"/v2/clusters/default?user_id={user_1['id']}") response = await async_client.get(get_cluster_url) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = parse_obj_as(ClusterGet, response.json()) + returned_cluster = ClusterGet.model_validate(response.json()) assert returned_cluster assert returned_cluster.id == 0 assert returned_cluster.name == "Default cluster" @@ -319,7 +319,7 @@ async def test_create_cluster( ), ) assert response.status_code == status.HTTP_201_CREATED, f"received: {response.text}" - created_cluster = parse_obj_as(ClusterGet, response.json()) + created_cluster = ClusterGet.model_validate(response.json()) assert created_cluster assert cluster_data.dict( @@ -371,7 +371,7 @@ async def test_update_own_cluster( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - original_cluster = parse_obj_as(ClusterGet, response.json()) + original_cluster = ClusterGet.model_validate(response.json()) # now we modify nothing response = await async_client.patch( @@ -383,11 +383,11 @@ async def test_update_own_cluster( ), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = parse_obj_as(ClusterGet, response.json()) - assert returned_cluster.dict() == original_cluster.dict() + returned_cluster = ClusterGet.model_validate(response.json()) + assert returned_cluster.model_dump() == original_cluster.model_dump() # modify some simple things - expected_modified_cluster = original_cluster.copy() + expected_modified_cluster = original_cluster.model_copy() for cluster_patch in [ ClusterPatch(name=faker.name()), 
ClusterPatch(description=faker.text()), @@ -407,13 +407,13 @@ async def test_update_own_cluster( json=jsonable_cluster_patch, ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = parse_obj_as(ClusterGet, response.json()) - expected_modified_cluster = expected_modified_cluster.copy( - update=cluster_patch.dict(**_PATCH_EXPORT) + returned_cluster = ClusterGet.model_validate(response.json()) + expected_modified_cluster = expected_modified_cluster.model_copy( + update=cluster_patch.model_dump(**_PATCH_EXPORT) ) - assert returned_cluster.dict( + assert returned_cluster.model_dump( exclude={"authentication": {"password"}} - ) == expected_modified_cluster.dict(exclude={"authentication": {"password"}}) + ) == expected_modified_cluster.model_dump(exclude={"authentication": {"password"}}) # we can change the access rights, the owner rights are always kept user_2 = registered_user() @@ -427,34 +427,34 @@ async def test_update_own_cluster( cluster_patch = ClusterPatch(accessRights={user_2["primary_gid"]: rights}) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", - json=cluster_patch.dict(**_PATCH_EXPORT), + json=cluster_patch.model_dump(**_PATCH_EXPORT), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = ClusterGet.parse_obj(response.json()) + returned_cluster = ClusterGet.model_validate(response.json()) expected_modified_cluster.access_rights[user_2["primary_gid"]] = rights - assert returned_cluster.dict( + assert returned_cluster.model_dump( exclude={"authentication": {"password"}} - ) == expected_modified_cluster.dict(exclude={"authentication": {"password"}}) + ) == expected_modified_cluster.model_dump(exclude={"authentication": {"password"}}) # we can change the owner since we are admin cluster_patch = ClusterPatch(owner=user_2["primary_gid"]) response = await async_client.patch( 
f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", json=json.loads( - cluster_patch.json( + cluster_patch.model_dump_json( **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) ) ), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - returned_cluster = ClusterGet.parse_obj(response.json()) + returned_cluster = ClusterGet.model_validate(response.json()) expected_modified_cluster.owner = user_2["primary_gid"] expected_modified_cluster.access_rights[ user_2["primary_gid"] ] = CLUSTER_ADMIN_RIGHTS - assert returned_cluster.dict( + assert returned_cluster.model_dump( exclude={"authentication": {"password"}} - ) == expected_modified_cluster.dict(exclude={"authentication": {"password"}}) + ) == expected_modified_cluster.model_dump(exclude={"authentication": {"password"}}) # we should not be able to reduce the rights of the new owner cluster_patch = ClusterPatch( @@ -463,7 +463,7 @@ async def test_update_own_cluster( response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", json=json.loads( - cluster_patch.json( + cluster_patch.model_dump_json( **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) ) ), @@ -487,7 +487,7 @@ async def test_update_default_cluster_fails( response = await async_client.patch( f"/v2/clusters/default?user_id={user_1['id']}", json=json.loads( - ClusterPatch().json( + ClusterPatch().model_dump_json( **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) ) ), @@ -545,7 +545,7 @@ async def test_update_another_cluster( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" - parse_obj_as(ClusterGet, response.json()) + ClusterGet.model_validate(response.json()) # let's try to modify stuff as we are user 2 for cluster_patch in [ @@ -582,7 +582,7 @@ async def test_update_another_cluster( response = await async_client.patch( 
f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}", json=json.loads( - cluster_patch.json( + cluster_patch.model_dump_json( **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch), ) @@ -603,7 +603,7 @@ async def test_update_another_cluster( response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}", json=json.loads( - cluster_patch.json( + cluster_patch.model_dump_json( **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch), ) @@ -729,14 +729,13 @@ async def test_ping_invalid_cluster_raises_422( # calling with correct data but non existing cluster also raises some_fake_cluster = ClusterPing( endpoint=faker.url(), - authentication=parse_obj_as( - ClusterAuthentication, cluster_simple_authentication() + authentication=TypeAdapter(ClusterAuthentication).validate_python(cluster_simple_authentication() ), ) response = await async_client.post( "/v2/clusters:ping", json=json.loads( - some_fake_cluster.json( + some_fake_cluster.model_dump_json( by_alias=True, encoder=create_json_encoder_wo_secrets(ClusterPing) ) ), @@ -751,16 +750,16 @@ async def test_ping_cluster( local_dask_gateway_server: DaskGatewayServer, ): valid_cluster = ClusterPing( - endpoint=parse_obj_as(AnyHttpUrl, local_dask_gateway_server.address), + endpoint=TypeAdapter(AnyHttpUrl).validate_python(local_dask_gateway_server.address), authentication=SimpleAuthentication( username="pytest_user", - password=parse_obj_as(SecretStr, local_dask_gateway_server.password), + password=TypeAdapter(SecretStr).validate_python(local_dask_gateway_server.password), ), ) response = await async_client.post( "/v2/clusters:ping", json=json.loads( - valid_cluster.json( + valid_cluster.model_dump_json( by_alias=True, encoder=create_json_encoder_wo_secrets(SimpleAuthentication), ) @@ -792,7 +791,7 @@ async def test_ping_specific_cluster( endpoint=local_dask_gateway_server.address, authentication=SimpleAuthentication( username="pytest_user", - 
password=parse_obj_as(SecretStr, local_dask_gateway_server.password), + password=TypeAdapter(SecretStr).validate_python(local_dask_gateway_server.password), ), ) for n in range(111) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 73a5ca6b447..73a9508b8d0 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -49,7 +49,7 @@ ) from models_library.utils.fastapi_encoders import jsonable_encoder from models_library.wallets import WalletInfo -from pydantic import AnyHttpUrl, ByteSize, PositiveInt, ValidationError, parse_obj_as +from pydantic import AnyHttpUrl, ByteSize, PositiveInt, TypeAdapter, ValidationError from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.rabbit import RabbitSettings @@ -117,8 +117,7 @@ def fake_service_extras() -> ServiceExtras: @pytest.fixture def fake_service_resources() -> ServiceResourcesDict: - return parse_obj_as( - ServiceResourcesDict, + return TypeAdapter(ServiceResourcesDict).validate_python( ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], ) @@ -422,7 +421,7 @@ def fake_ec2_cpus() -> PositiveInt: @pytest.fixture def fake_ec2_ram() -> ByteSize: - return parse_obj_as(ByteSize, "4GiB") + return TypeAdapter(ByteSize).validate_python("4GiB") @pytest.fixture @@ -575,7 +574,7 @@ async def test_create_computation_with_wallet( @pytest.mark.parametrize( "default_pricing_plan", [ - PricingPlanGet.construct( + PricingPlanGet.model_construct( **PricingPlanGet.model_config["json_schema_extra"]["examples"][0] ) ], @@ -874,7 +873,7 @@ async def test_get_computation_from_empty_project( ) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text - returned_computation 
= ComputationGet.parse_obj(response.json()) + returned_computation = ComputationGet.model_validate(response.json()) assert returned_computation expected_computation = ComputationGet( id=proj.uuid, @@ -882,8 +881,8 @@ async def test_get_computation_from_empty_project( pipeline_details=PipelineDetails( adjacency_list={}, node_states={}, progress=None ), - url=parse_obj_as( - AnyHttpUrl, f"{async_client.base_url.join(get_computation_url)}" + url=TypeAdapter(AnyHttpUrl).validate_python( + f"{async_client.base_url.join(get_computation_url)}" ), stop_url=None, result=None, @@ -893,7 +892,7 @@ async def test_get_computation_from_empty_project( stopped=None, submitted=None, ) - assert returned_computation.dict() == expected_computation.dict() + assert returned_computation.model_dump() == expected_computation.model_dump() async def test_get_computation_from_not_started_computation_task( @@ -923,14 +922,14 @@ async def test_get_computation_from_not_started_computation_task( comp_tasks = tasks(user=user, project=proj) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text - returned_computation = ComputationGet.parse_obj(response.json()) + returned_computation = ComputationGet.model_validate(response.json()) assert returned_computation expected_computation = ComputationGet( id=proj.uuid, state=RunningState.NOT_STARTED, pipeline_details=PipelineDetails( - adjacency_list=parse_obj_as( - dict[NodeID, list[NodeID]], fake_workbench_adjacency + adjacency_list=TypeAdapter(dict[NodeID, list[NodeID]]).validate_python( + fake_workbench_adjacency ), progress=0, node_states={ @@ -948,8 +947,8 @@ async def test_get_computation_from_not_started_computation_task( if t.node_class == NodeClass.COMPUTATIONAL }, ), - url=parse_obj_as( - AnyHttpUrl, f"{async_client.base_url.join(get_computation_url)}" + url=TypeAdapter(AnyHttpUrl).validate_python( + f"{async_client.base_url.join(get_computation_url)}" ), stop_url=None, result=None, 
@@ -960,12 +959,12 @@ async def test_get_computation_from_not_started_computation_task( submitted=None, ) _CHANGED_FIELDS = {"submitted"} - assert returned_computation.dict( + assert returned_computation.model_dump( exclude=_CHANGED_FIELDS - ) == expected_computation.dict(exclude=_CHANGED_FIELDS) - assert returned_computation.dict( + ) == expected_computation.model_dump(exclude=_CHANGED_FIELDS) + assert returned_computation.model_dump( include=_CHANGED_FIELDS - ) != expected_computation.dict(include=_CHANGED_FIELDS) + ) != expected_computation.model_dump(include=_CHANGED_FIELDS) async def test_get_computation_from_published_computation_task( @@ -993,7 +992,7 @@ async def test_get_computation_from_published_computation_task( ) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text - returned_computation = ComputationGet.parse_obj(response.json()) + returned_computation = ComputationGet.model_validate(response.json()) assert returned_computation expected_stop_url = async_client.base_url.join( f"/v2/computations/{proj.uuid}:stop?user_id={user['id']}" @@ -1002,8 +1001,8 @@ async def test_get_computation_from_published_computation_task( id=proj.uuid, state=RunningState.PUBLISHED, pipeline_details=PipelineDetails( - adjacency_list=parse_obj_as( - dict[NodeID, list[NodeID]], fake_workbench_adjacency + adjacency_list=TypeAdapter(dict[NodeID, list[NodeID]]).validate_python( + fake_workbench_adjacency ), node_states={ t.node_id: NodeState( @@ -1021,10 +1020,10 @@ async def test_get_computation_from_published_computation_task( }, progress=0, ), - url=parse_obj_as( - AnyHttpUrl, f"{async_client.base_url.join(get_computation_url)}" + url=TypeAdapter(AnyHttpUrl).validate_python( + f"{async_client.base_url.join(get_computation_url)}" ), - stop_url=parse_obj_as(AnyHttpUrl, f"{expected_stop_url}"), + stop_url=TypeAdapter(AnyHttpUrl).validate_python(f"{expected_stop_url}"), result=None, iteration=1, 
cluster_id=DEFAULT_CLUSTER_ID, @@ -1034,9 +1033,9 @@ async def test_get_computation_from_published_computation_task( ) _CHANGED_FIELDS = {"submitted"} - assert returned_computation.dict( + assert returned_computation.model_dump( exclude=_CHANGED_FIELDS - ) == expected_computation.dict(exclude=_CHANGED_FIELDS) - assert returned_computation.dict( + ) == expected_computation.model_dump(exclude=_CHANGED_FIELDS) + assert returned_computation.model_dump( include=_CHANGED_FIELDS - ) != expected_computation.dict(include=_CHANGED_FIELDS) + ) != expected_computation.model_dump(include=_CHANGED_FIELDS) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py index 6f75f43c59f..c908f3daef0 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py @@ -21,7 +21,7 @@ from models_library.projects import ProjectAtDB, ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from pydantic import parse_obj_as, parse_raw_as +from pydantic import parse_raw_as from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director_v2.core.settings import AppSettings @@ -197,7 +197,7 @@ async def test_get_tasks_outputs( assert resp.status_code == status.HTTP_200_OK - tasks_outputs = parse_obj_as(TasksOutputs, resp.json()) + tasks_outputs = TasksOutputs.model_validate(resp.json()) assert selection == set(tasks_outputs.nodes_outputs.keys()) outputs = tasks_outputs.nodes_outputs[node_id] diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index d15ab46a498..daf6df774cf 100644 --- 
a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -41,7 +41,7 @@ RabbitResourceTrackingStoppedMessage, ) from models_library.users import UserID -from pydantic import parse_obj_as, parse_raw_as +from pydantic import TypeAdapter, parse_raw_as from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq import RabbitMQClient @@ -144,7 +144,7 @@ async def _assert_comp_tasks_db( & (comp_tasks.c.node_id.in_([f"{n}" for n in task_ids])) ) # there is only one entry ) - tasks = parse_obj_as(list[CompTaskAtDB], await result.fetchall()) + tasks = TypeAdapter(list[CompTaskAtDB]).validate_python(await result.fetchall()) assert all( t.state == expected_state for t in tasks ), f"expected state: {expected_state}, found: {[t.state for t in tasks]}" diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py index d8d95f93572..e6aa8de204b 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py @@ -36,9 +36,8 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimCoreFileLink, SimcoreS3FileID from models_library.users import UserID -from pydantic import ByteSize +from pydantic import ByteSize, TypeAdapter from pydantic.networks import AnyUrl -from pydantic.tools import parse_obj_as from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_sdk.node_ports_v2 import FileLinkType @@ -95,16 +94,15 @@ async def mocked_node_ports_filemanager_fcts( 0, FileUploadSchema( urls=[ - parse_obj_as( - AnyUrl, + TypeAdapter(AnyUrl).validate_python( f"{URL(faker.uri()).with_scheme(choice(tasks_file_link_scheme))}", # noqa: S311 ) ], - 
chunk_size=parse_obj_as(ByteSize, "5GiB"), + chunk_size=TypeAdapter(ByteSize).validate_python("5GiB"), links=FileUploadLinks( - abort_upload=parse_obj_as(AnyUrl, "https://www.fakeabort.com"), - complete_upload=parse_obj_as( - AnyUrl, "https://www.fakecomplete.com" + abort_upload=TypeAdapter(AnyUrl).validate_python("https://www.fakeabort.com"), + complete_upload=TypeAdapter(AnyUrl).validate_python( + "https://www.fakecomplete.com" ), ), ), @@ -234,7 +232,7 @@ def fake_task_output_data( ) for key, value in fake_io_data.items() } - data = parse_obj_as(TaskOutputData, converted_data) + data = TypeAdapter(TaskOutputData).validate_python(converted_data) assert data return data @@ -334,7 +332,7 @@ def return_fake_input_value(*args, **kwargs): fake_inputs.values(), fake_io_schema.values(), strict=True ): if value_type["type"] == "data:*/*": - yield parse_obj_as(AnyUrl, faker.url()) + yield TypeAdapter(AnyUrl).validate_python(faker.url()) else: yield value From de5b8d496ba41f56472d23bae01c76024ab77b33 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 5 Nov 2024 22:16:51 +0100 Subject: [PATCH 010/121] fix deprecated --- .../api/routes/dynamic_services.py | 2 +- .../models/dynamic_services_scheduler.py | 2 +- .../modules/db/repositories/clusters.py | 8 ++++---- .../modules/db/repositories/comp_pipelines.py | 4 ++-- .../modules/db/repositories/comp_tasks/_utils.py | 6 +++--- .../modules/db/repositories/projects_networks.py | 2 +- .../db/repositories/user_preferences_frontend.py | 2 +- .../modules/director_v0.py | 6 +++--- .../modules/dynamic_sidecar/api_client/_public.py | 2 +- .../docker_service_specs/settings.py | 6 +++--- .../docker_service_specs/sidecar.py | 2 +- .../scheduler/_core/_event_create_sidecars.py | 4 ++-- .../scheduler/_core/_events_utils.py | 2 +- .../scheduler/_core/_scheduler_utils.py | 2 +- .../modules/projects_networks.py | 4 ++-- .../simcore_service_director_v2/modules/storage.py | 2 +- .../src/simcore_service_director_v2/utils/dask.py | 4 ++-- 
services/director-v2/tests/conftest.py | 4 ++-- .../director-v2/tests/helpers/shared_comp_utils.py | 2 +- .../tests/integration/01/test_computation_api.py | 4 ++-- .../test_dynamic_sidecar_nodeports_integration.py | 6 +++--- services/director-v2/tests/integration/conftest.py | 2 +- .../tests/unit/test_models_dynamic_services.py | 2 +- .../tests/unit/test_modules_dask_client.py | 14 +++++++------- .../tests/unit/test_modules_dask_clients_pool.py | 2 +- .../tests/unit/test_modules_project_networks.py | 6 +++--- .../tests/unit/test_utils_comp_scheduler.py | 2 +- services/director-v2/tests/unit/test_utils_dags.py | 2 +- .../director-v2/tests/unit/with_dbs/conftest.py | 2 +- .../tests/unit/with_dbs/test_api_route_clusters.py | 2 +- .../with_dbs/test_api_route_clusters_details.py | 6 +++--- .../unit/with_dbs/test_api_route_computations.py | 4 ++-- .../with_dbs/test_api_route_dynamic_services.py | 8 ++++---- .../director-v2/tests/unit/with_dbs/test_cli.py | 2 +- .../test_modules_comp_scheduler_dask_scheduler.py | 10 +++++----- .../test_modules_dynamic_sidecar_docker_api.py | 4 ++-- ...modules_dynamic_sidecar_docker_service_specs.py | 14 +++++++------- .../tests/unit/with_dbs/test_utils_dask.py | 6 +++--- 38 files changed, 82 insertions(+), 82 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py index 743d7ae2a66..43fcf83e23d 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py @@ -286,7 +286,7 @@ async def service_retrieve_data_on_ports( ) # validate and return - return RetrieveDataOutEnveloped.parse_obj(response.json()) + return RetrieveDataOutEnveloped.model_validate(response.json()) @router.post( diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py 
b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index cbb2db5120a..280710fda3d 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -275,7 +275,7 @@ def compose_spec_submitted(self) -> bool: ) instrumentation: ServicesInstrumentation = Field( - default_factory=lambda: ServicesInstrumentation.parse_obj({}), + default_factory=lambda: ServicesInstrumentation.model_validate({}), description="keeps track times for various operations", ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/clusters.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/clusters.py index 214c03b9dca..30381110173 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/clusters.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/clusters.py @@ -108,9 +108,9 @@ async def _compute_user_access_rights( ) and (primary_grp_rights := cluster.access_rights.get(primary_group_row.gid)): return primary_grp_rights - solved_rights = CLUSTER_NO_RIGHTS.dict() + solved_rights = CLUSTER_NO_RIGHTS.model_dump() for group_row in filter(lambda ugrp: ugrp[1] != GroupType.PRIMARY, user_groups): - grp_access = cluster.access_rights.get(group_row.gid, CLUSTER_NO_RIGHTS).dict() + grp_access = cluster.access_rights.get(group_row.gid, CLUSTER_NO_RIGHTS).model_dump() for operation in ["read", "write", "delete"]: solved_rights[operation] |= grp_access[operation] return ClusterAccessRights(**solved_rights) @@ -250,14 +250,14 @@ async def update_cluster( # pylint: disable=too-many-branches if updated_cluster.access_rights: for grp, rights in resolved_access_rights.items(): insert_stmt = pg_insert(cluster_to_groups).values( - **rights.dict(by_alias=True), gid=grp, cluster_id=the_cluster.id + 
**rights.model_dump(by_alias=True), gid=grp, cluster_id=the_cluster.id ) on_update_stmt = insert_stmt.on_conflict_do_update( index_elements=[ cluster_to_groups.c.cluster_id, cluster_to_groups.c.gid, ], - set_=rights.dict(by_alias=True), + set_=rights.model_dump(by_alias=True), ) await conn.execute(on_update_stmt) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py index 3c24694c2fd..da273c79159 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py @@ -39,14 +39,14 @@ async def upsert_pipeline( dag_adjacency_list=nx.to_dict_of_lists(dag_graph), state=RunningState.PUBLISHED if publish else RunningState.NOT_STARTED, ) - insert_stmt = insert(comp_pipeline).values(**pipeline_at_db.dict(by_alias=True)) + insert_stmt = insert(comp_pipeline).values(**pipeline_at_db.model_dump(by_alias=True)) # FIXME: This is not a nice thing. this part of the information should be kept in comp_runs. 
update_exclusion_policy = set() if not dag_graph.nodes(): update_exclusion_policy.add("dag_adjacency_list") on_update_stmt = insert_stmt.on_conflict_do_update( index_elements=[comp_pipeline.c.project_id], - set_=pipeline_at_db.dict( + set_=pipeline_at_db.model_dump( by_alias=True, exclude_unset=True, exclude=update_exclusion_policy ), ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index bc43a6badbf..41b1b92c9e8 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -105,7 +105,7 @@ def _compute_node_requirements( node_defined_resources[resource_name] = node_defined_resources.get( resource_name, 0 ) + min(resource_value.limit, resource_value.reservation) - return NodeRequirements.parse_obj(node_defined_resources) + return NodeRequirements.model_validate(node_defined_resources) def _compute_node_boot_mode(node_resources: ServiceResourcesDict) -> BootMode: @@ -187,7 +187,7 @@ async def _generate_task_image( data.update(envs=_compute_node_envs(node_labels)) if node_extras and node_extras.container_spec: data.update(command=node_extras.container_spec.command) - return Image.parse_obj(data) + return Image.model_validate(data) async def _get_pricing_and_hardware_infos( @@ -430,7 +430,7 @@ async def generate_tasks_list_from_project( task_db = CompTaskAtDB( project_id=project.uuid, node_id=NodeID(node_id), - schema=NodeSchema.parse_obj( + schema=NodeSchema.model_validate( node_details.dict( exclude_unset=True, by_alias=True, include={"inputs", "outputs"} ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py 
b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py index fe033761128..172c577ee56 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py @@ -28,7 +28,7 @@ async def get_projects_networks(self, project_id: ProjectID) -> ProjectsNetworks async def upsert_projects_networks( self, project_id: ProjectID, networks_with_aliases: NetworksWithAliases ) -> None: - projects_networks_to_insert = ProjectsNetworks.parse_obj( + projects_networks_to_insert = ProjectsNetworks.model_validate( {"project_uuid": project_id, "networks_with_aliases": networks_with_aliases} ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py index 01d7fdcce61..0ce81c14bb6 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/user_preferences_frontend.py @@ -31,5 +31,5 @@ async def get_user_preference( return ( None if preference_payload is None - else preference_class.parse_obj(preference_payload) + else preference_class.model_validate(preference_payload) ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py index 0bc8c799dcb..322e5281e46 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py @@ -83,7 +83,7 @@ async def get_service_extras( f"/service_extras/{urllib.parse.quote_plus(service_key)}/{service_version}", ) if resp.status_code == 
status.HTTP_200_OK: - return ServiceExtras.parse_obj(unenvelope_or_raise_error(resp)) + return ServiceExtras.model_validate(unenvelope_or_raise_error(resp)) raise HTTPException(status_code=resp.status_code, detail=resp.content) @log_decorator(logger=logger) @@ -94,7 +94,7 @@ async def get_running_service_details( "GET", f"running_interactive_services/{service_uuid}" ) if resp.status_code == status.HTTP_200_OK: - return RunningDynamicServiceDetails.parse_obj( + return RunningDynamicServiceDetails.model_validate( unenvelope_or_raise_error(resp) ) raise HTTPException(status_code=resp.status_code, detail=resp.content) @@ -109,7 +109,7 @@ async def get_service_labels( ) resp.raise_for_status() if resp.status_code == status.HTTP_200_OK: - return SimcoreServiceLabels.parse_obj(unenvelope_or_raise_error(resp)) + return SimcoreServiceLabels.model_validate(unenvelope_or_raise_error(resp)) raise HTTPException(status_code=resp.status_code, detail=resp.content) @log_decorator(logger=logger) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py index 7ce782c6366..e07a8b6cc80 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py @@ -495,7 +495,7 @@ async def get_service_activity( dynamic_sidecar_endpoint ) decoded_response = response.json() - return ActivityInfo.parse_obj(decoded_response) if decoded_response else None + return ActivityInfo.model_validate(decoded_response) if decoded_response else None async def free_reserved_disk_space( self, dynamic_sidecar_endpoint: AnyHttpUrl diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py 
b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py index 5100d63bab0..f7c842747f4 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py @@ -304,7 +304,7 @@ def _merge_resources_in_settings( # merge all resources empty_resource_entry: SimcoreServiceSettingLabelEntry = ( - SimcoreServiceSettingLabelEntry.parse_obj( + SimcoreServiceSettingLabelEntry.model_validate( { "name": "Resources", "type": "Resources", @@ -406,7 +406,7 @@ def _get_boot_options( boot_options = json.loads(boot_options_encoded)["boot-options"] log.debug("got boot_options=%s", boot_options) - return {k: BootOption.parse_obj(v) for k, v in boot_options.items()} + return {k: BootOption.model_validate(v) for k, v in boot_options.items()} def _assemble_env_vars_for_boot_options( @@ -511,7 +511,7 @@ async def merge_settings_before_use( ) settings = _patch_target_service_into_env_vars(settings) - return SimcoreServiceSettingsLabel.parse_obj(settings) + return SimcoreServiceSettingsLabel.model_validate(settings) __all__ = ["merge_settings_before_use", "update_service_params_from_settings"] diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index 7e7428b195a..e8206ced2b8 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -553,4 +553,4 @@ async def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: create_service_params=create_service_params, ) - return 
AioDockerServiceSpec.parse_obj(create_service_params) + return AioDockerServiceSpec.model_validate(create_service_params) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py index 6475e324950..a9a5af803c8 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py @@ -242,12 +242,12 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: scheduler_data.user_id, scheduler_data.key, scheduler_data.version ) ).get("sidecar", {}) or {} - user_specific_service_spec = AioDockerServiceSpec.parse_obj( + user_specific_service_spec = AioDockerServiceSpec.model_validate( user_specific_service_spec ) # NOTE: since user_specific_service_spec follows Docker Service Spec and not Aio # we do not use aliases when exporting dynamic_sidecar_service_spec_base - dynamic_sidecar_service_final_spec = AioDockerServiceSpec.parse_obj( + dynamic_sidecar_service_final_spec = AioDockerServiceSpec.model_validate( nested_update( jsonable_encoder(dynamic_sidecar_service_spec_base, exclude_unset=True), jsonable_encoder(user_specific_service_spec, exclude_unset=True), diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py index 9dbe2763bc9..07b9c94b637 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py @@ -553,7 +553,7 @@ 
async def get_allow_metrics_collection( bool, AllowMetricsCollectionFrontendUserPreference.get_default_value() ) - allow_metrics_collection = AllowMetricsCollectionFrontendUserPreference.parse_obj( + allow_metrics_collection = AllowMetricsCollectionFrontendUserPreference.model_validate( preference ) return allow_metrics_collection.value diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py index b0335677084..5a4a011a874 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py @@ -76,7 +76,7 @@ def create_model_from_scheduler_data( service_state: ServiceState, service_message: str, ) -> RunningDynamicServiceDetails: - return RunningDynamicServiceDetails.parse_obj( + return RunningDynamicServiceDetails.model_validate( { "boot_type": ServiceBootType.V2, "user_id": scheduler_data.user_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py b/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py index ed64923d3c6..e18dfc24121 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/projects_networks.py @@ -64,7 +64,7 @@ async def requires_dynamic_sidecar( simcore_service_labels: SimcoreServiceLabels = ( await director_v0_client.get_service_labels( - service=ServiceKeyVersion.parse_obj( + service=ServiceKeyVersion.model_validate( {"key": decoded_service_key, "version": service_version} ) ) @@ -248,7 +248,7 @@ async def update_from_workbench( ) ) except ProjectNetworkNotFoundError: - existing_projects_networks = 
ProjectsNetworks.parse_obj( + existing_projects_networks = ProjectsNetworks.model_validate( {"project_uuid": project_id, "networks_with_aliases": {}} ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/storage.py b/services/director-v2/src/simcore_service_director_v2/modules/storage.py index 98e18845333..b0cbb5e9629 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/storage.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/storage.py @@ -72,5 +72,5 @@ async def get_s3_access(self, user_id: UserID) -> S3Settings: ) resp.raise_for_status() if resp.status_code == status.HTTP_200_OK: - return S3Settings.parse_obj(unenvelope_or_raise_error(resp)) + return S3Settings.model_validate(unenvelope_or_raise_error(resp)) raise HTTPException(status_code=resp.status_code, detail=resp.content) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index 422eb52b210..e8f010ec88e 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -229,7 +229,7 @@ async def compute_input_data( if ports_errors: raise PortsValidationError(project_id, node_id, ports_errors) - return TaskInputData.parse_obj(input_data) + return TaskInputData.model_validate(input_data) async def compute_output_data_schema( @@ -276,7 +276,7 @@ async def compute_output_data_schema( } ) - return TaskOutputDataSchema.parse_obj(output_data_schema) + return TaskOutputDataSchema.model_validate(output_data_schema) _LOGS_FILE_NAME = "logs.zip" diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index a1ec0c8bd4d..0ee408a624a 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -239,7 +239,7 @@ def fake_workbench(fake_workbench_file: Path) -> NodesDict: workbench_dict = 
json.loads(fake_workbench_file.read_text()) workbench = {} for node_id, node_data in workbench_dict.items(): - workbench[node_id] = Node.parse_obj(node_data) + workbench[node_id] = Node.model_validate(node_data) return workbench @@ -336,7 +336,7 @@ async def wrapper(*args, **kwargs): @pytest.fixture def mock_osparc_variables_api_auth_rpc(mocker: MockerFixture) -> None: - fake_data = ApiKeyGet.parse_obj( + fake_data = ApiKeyGet.model_validate( ApiKeyGet.model_config["json_schema_extra"]["examples"][0] ) diff --git a/services/director-v2/tests/helpers/shared_comp_utils.py b/services/director-v2/tests/helpers/shared_comp_utils.py index ad7185e8fa7..5670ffc5de8 100644 --- a/services/director-v2/tests/helpers/shared_comp_utils.py +++ b/services/director-v2/tests/helpers/shared_comp_utils.py @@ -68,7 +68,7 @@ async def check_pipeline_state() -> ComputationGet: assert ( response.status_code == status.HTTP_200_OK ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationGet.parse_obj(response.json()) + task_out = ComputationGet.model_validate(response.json()) assert task_out.id == project_uuid assert task_out.url.path == f"/v2/computations/{project_uuid}" print( diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index 110dbd5f89b..bce13680e29 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ -107,7 +107,7 @@ def fake_workbench_computational_pipeline_details( ) -> PipelineDetails: adjacency_list = json.loads(fake_workbench_computational_adjacency_file.read_text()) node_states = json.loads(fake_workbench_node_states_file.read_text()) - return PipelineDetails.parse_obj( + return PipelineDetails.model_validate( {"adjacency_list": adjacency_list, "node_states": node_states, "progress": 0} ) @@ -718,7 +718,7 @@ async def test_abort_computation( 
assert ( response.status_code == status.HTTP_202_ACCEPTED ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationGet.parse_obj(response.json()) + task_out = ComputationGet.model_validate(response.json()) assert task_out.url.path == f"/v2/computations/{sleepers_project.uuid}:stop" assert task_out.stop_url is None diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index b3825340fd1..a757396139a 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -445,13 +445,13 @@ async def projects_networks_db( # NOTE: director-v2 does not have access to the webserver which creates this # injecting all dynamic-sidecar started services on a default networks - container_aliases: ContainerAliases = ContainerAliases.parse_obj({}) + container_aliases: ContainerAliases = ContainerAliases.model_validate({}) for k, (node_uuid, node) in enumerate(current_study.workbench.items()): if not is_legacy(node): container_aliases[node_uuid] = f"networkable_alias_{k}" - networks_with_aliases: NetworksWithAliases = NetworksWithAliases.parse_obj({}) + networks_with_aliases: NetworksWithAliases = NetworksWithAliases.model_validate({}) default_network_name = f"{PROJECT_NETWORK_PREFIX}_{current_study.uuid}_test" networks_with_aliases[default_network_name] = container_aliases @@ -971,7 +971,7 @@ async def test_nodeports_integration( task_out, project=current_study, exp_task_state=RunningState.SUCCESS, - exp_pipeline_details=PipelineDetails.parse_obj(fake_dy_success), + exp_pipeline_details=PipelineDetails.model_validate(fake_dy_success), iteration=1, cluster_id=DEFAULT_CLUSTER_ID, ) diff --git a/services/director-v2/tests/integration/conftest.py 
b/services/director-v2/tests/integration/conftest.py index 0e6f8632094..424cb128257 100644 --- a/services/director-v2/tests/integration/conftest.py +++ b/services/director-v2/tests/integration/conftest.py @@ -98,7 +98,7 @@ async def _creator( response.raise_for_status() assert response.status_code == status.HTTP_201_CREATED - computation_task = ComputationGet.parse_obj(response.json()) + computation_task = ComputationGet.model_validate(response.json()) created_comp_tasks.append((user_id, computation_task)) return computation_task diff --git a/services/director-v2/tests/unit/test_models_dynamic_services.py b/services/director-v2/tests/unit/test_models_dynamic_services.py index dd0df8a0eed..e28cd51ca7f 100644 --- a/services/director-v2/tests/unit/test_models_dynamic_services.py +++ b/services/director-v2/tests/unit/test_models_dynamic_services.py @@ -219,7 +219,7 @@ def test_regression_legacy_service_compatibility() -> None: "user_id": "1", "project_id": "b1ec5c8e-f5bb-11eb-b1d5-02420a000006", } - service_details = RunningDynamicServiceDetails.parse_obj(api_response) + service_details = RunningDynamicServiceDetails.model_validate(api_response) assert service_details diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index bc053ed20bf..f45040c143a 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -480,7 +480,7 @@ def task_labels(comp_run_metadata: RunMetadataDict) -> ContainerLabelsDict: @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.parse_obj( + return HardwareInfo.model_validate( HardwareInfo.model_config["json_schema_extra"]["examples"][0] ) @@ -529,7 +529,7 @@ def fake_sidecar_fct( event = distributed.Event(_DASK_EVENT_NAME) event.wait(timeout=25) - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 
123}) # NOTE: We pass another fct so it can run in our localy created dask cluster # NOTE2: since there is only 1 task here, it's ok to pass the nodeID @@ -645,7 +645,7 @@ def fake_sidecar_fct( task = worker.state.tasks.get(worker.get_current_task()) assert task is not None - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # NOTE: We pass another fct so it can run in our localy created dask cluster published_computation_task = await dask_client.send_computation_tasks( @@ -737,7 +737,7 @@ def fake_remote_fct( print("--> raising cancellation error now") raise TaskCancelledError - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, @@ -1083,7 +1083,7 @@ def fake_remote_fct( if fail_remote_fct: err_msg = "We fail because we're told to!" raise ValueError(err_msg) - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) published_computation_task = await dask_client.send_computation_tasks( user_id=user_id, @@ -1174,7 +1174,7 @@ def fake_remote_fct( published_event = Event(name=_DASK_START_EVENT) published_event.set() - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # run the computation published_computation_task = await dask_client.send_computation_tasks( @@ -1250,7 +1250,7 @@ def fake_sidecar_fct( event = distributed.Event(_DASK_EVENT_NAME) event.wait(timeout=25) - return TaskOutputData.parse_obj({"some_output_key": 123}) + return TaskOutputData.model_validate({"some_output_key": 123}) # NOTE: We pass another fct so it can run in our localy created dask cluster published_computation_task = await dask_client.send_computation_tasks( diff --git 
a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py index 3dd97cc4753..f9e8b1f13b7 100644 --- a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py +++ b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py @@ -87,7 +87,7 @@ def creator(num_clusters: int) -> list[Cluster]: fake_clusters = [] for n in range(num_clusters): fake_clusters.append( - Cluster.parse_obj( + Cluster.model_validate( { "id": faker.pyint(), "name": faker.name(), diff --git a/services/director-v2/tests/unit/test_modules_project_networks.py b/services/director-v2/tests/unit/test_modules_project_networks.py index 585d8131b8e..848b3629e10 100644 --- a/services/director-v2/tests/unit/test_modules_project_networks.py +++ b/services/director-v2/tests/unit/test_modules_project_networks.py @@ -40,8 +40,8 @@ def using( attach: list[Any], ) -> "Example": return cls( - existing_networks_with_aliases=NetworksWithAliases.parse_obj(existing), - new_networks_with_aliases=NetworksWithAliases.parse_obj(new), + existing_networks_with_aliases=NetworksWithAliases.model_validate(existing), + new_networks_with_aliases=NetworksWithAliases.model_validate(new), expected_calls=MockedCalls(detach=detach, attach=attach), ) @@ -184,7 +184,7 @@ def dy_workbench_with_networkable_labels(mocks_dir: Path) -> NodesDict: for node_uuid, node_data in dy_workbench.items(): node_data["label"] = f"label_{uuid4()}" - parsed_workbench[node_uuid] = Node.parse_obj(node_data) + parsed_workbench[node_uuid] = Node.model_validate(node_data) return parsed_workbench diff --git a/services/director-v2/tests/unit/test_utils_comp_scheduler.py b/services/director-v2/tests/unit/test_utils_comp_scheduler.py index ff636be1736..dfb7c0326b1 100644 --- a/services/director-v2/tests/unit/test_utils_comp_scheduler.py +++ b/services/director-v2/tests/unit/test_utils_comp_scheduler.py @@ -78,7 +78,7 @@ def test_get_resource_tracking_run_id( 
@pytest.mark.parametrize( "task", [ - CompTaskAtDB.parse_obj(example) + CompTaskAtDB.model_validate(example) for example in CompTaskAtDB.model_config["json_schema_extra"]["examples"] ], ids=str, diff --git a/services/director-v2/tests/unit/test_utils_dags.py b/services/director-v2/tests/unit/test_utils_dags.py index 3ab2c68fea1..72e0383e0a4 100644 --- a/services/director-v2/tests/unit/test_utils_dags.py +++ b/services/director-v2/tests/unit/test_utils_dags.py @@ -597,5 +597,5 @@ async def test_compute_pipeline_details( pipeline_test_params.comp_tasks, ) assert ( - received_details.dict() == pipeline_test_params.expected_pipeline_details.dict() + received_details.model_dump() == pipeline_test_params.expected_pipeline_details.model_dump() ) diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index c508f8e7ada..126c1e3c2f5 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -226,7 +226,7 @@ def creator(user: dict[str, Any], **cluster_kwargs) -> Cluster: cluster_config = Cluster.model_config["json_schema_extra"]["examples"][1] cluster_config["owner"] = user["primary_gid"] cluster_config.update(**cluster_kwargs) - new_cluster = Cluster.parse_obj(cluster_config) + new_cluster = Cluster.model_validate(cluster_config) assert new_cluster with postgres_db.connect() as conn: diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py index 65a080b90e7..2ab5f347879 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py @@ -70,7 +70,7 @@ def creator() -> dict[str, Any]: "username": faker.user_name(), "password": faker.password(), } - assert SimpleAuthentication.parse_obj(simple_auth) + assert SimpleAuthentication.model_validate(simple_auth) return 
simple_auth return creator diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py index 2b509ab1a6f..e9394aba278 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py @@ -114,12 +114,12 @@ async def test_get_default_cluster_details( f"/v2/clusters/default/details?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK - default_cluster_out = ClusterDetailsGet.parse_obj(response.json()) + default_cluster_out = ClusterDetailsGet.model_validate(response.json()) response = await async_client.get( f"/v2/clusters/{0}/details?user_id={user_1['id']}" ) assert response.status_code == status.HTTP_200_OK - assert default_cluster_out == ClusterDetailsGet.parse_obj(response.json()) + assert default_cluster_out == ClusterDetailsGet.model_validate(response.json()) async def _get_cluster_details( @@ -130,7 +130,7 @@ async def _get_cluster_details( ) assert response.status_code == status.HTTP_200_OK print(f"<-- received cluster details response {response=}") - cluster_out = ClusterDetailsGet.parse_obj(response.json()) + cluster_out = ClusterDetailsGet.model_validate(response.json()) assert cluster_out print(f"<-- received cluster details {cluster_out=}") assert cluster_out.scheduler, "the cluster's scheduler is not started!" 
diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 73a9508b8d0..156b4c5b881 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -148,7 +148,7 @@ def mocked_director_service_fcts( r"/services/simcore%2Fservices%2F(comp|dynamic|frontend)%2F[^/]+/\d+.\d+.\d+$" ), name="get_service", - ).respond(json={"data": [fake_service_details.dict(by_alias=True)]}) + ).respond(json={"data": [fake_service_details.model_dump(by_alias=True)]}) respx_mock.get( re.compile( r"/services/simcore%2Fservices%2F(comp|dynamic|frontend)%2F[^/]+/\d+.\d+.\d+/labels" @@ -161,7 +161,7 @@ def mocked_director_service_fcts( r"/service_extras/(simcore)%2F(services)%2F(comp|dynamic|frontend)%2F.+/(.+)" ), name="get_service_extras", - ).respond(json={"data": fake_service_extras.dict(by_alias=True)}) + ).respond(json={"data": fake_service_extras.model_dump(by_alias=True)}) yield respx_mock diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py index c0a48e3da95..2afb2b2070a 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py @@ -162,7 +162,7 @@ async def mock_retrieve_features( assert_all_mocked=True, ) as respx_mock: if is_legacy: - service_details = RunningDynamicServiceDetails.parse_obj( + service_details = RunningDynamicServiceDetails.model_validate( RunningDynamicServiceDetails.model_config["json_schema_extra"][ "examples" ][0] @@ -254,7 +254,7 @@ def get_stack_status(node_uuid: NodeID) -> RunningDynamicServiceDetails: if exp_status_code == status.HTTP_307_TEMPORARY_REDIRECT: raise DynamicSidecarNotFoundError(node_uuid) - return 
RunningDynamicServiceDetails.parse_obj( + return RunningDynamicServiceDetails.model_validate( RunningDynamicServiceDetails.model_config["json_schema_extra"]["examples"][ 0 ] @@ -338,7 +338,7 @@ def test_create_dynamic_services( exp_status_code: int, is_legacy: bool, ): - post_data = DynamicServiceCreate.parse_obj(service) + post_data = DynamicServiceCreate.model_validate(service) response = client.post( "/v2/dynamic_services", @@ -550,7 +550,7 @@ def test_delete_service_waiting_for_manual_intervention( is_legacy: bool, dynamic_sidecar_scheduler: DynamicSidecarsScheduler, ): - post_data = DynamicServiceCreate.parse_obj(service) + post_data = DynamicServiceCreate.model_validate(service) response = client.post( "/v2/dynamic_services", diff --git a/services/director-v2/tests/unit/with_dbs/test_cli.py b/services/director-v2/tests/unit/with_dbs/test_cli.py index da0bc605603..1892e2a5a38 100644 --- a/services/director-v2/tests/unit/with_dbs/test_cli.py +++ b/services/director-v2/tests/unit/with_dbs/test_cli.py @@ -106,7 +106,7 @@ def node_id(faker: Faker) -> NodeID: def mock_get_node_state(mocker: MockerFixture) -> None: mocker.patch( "simcore_service_director_v2.cli._core._get_dy_service_state", - return_value=DynamicServiceGet.parse_obj( + return_value=DynamicServiceGet.model_validate( RunningDynamicServiceDetails.model_config["json_schema_extra"]["examples"][ 0 ] diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index daf6df774cf..ae8b6326297 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -122,7 +122,7 @@ async def _assert_comp_run_db( & (comp_runs.c.project_uuid == f"{pub_project.project.uuid}") ) # there is only one entry ) - run_entry = CompRunsAtDB.parse_obj(await result.first()) + 
run_entry = CompRunsAtDB.model_validate(await result.first()) assert ( run_entry.result == expected_state ), f"comp_runs: expected state '{expected_state}, found '{run_entry.result}'" @@ -365,7 +365,7 @@ async def test_misconfigured_pipeline_is_not_scheduled( & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}") ) # there is only one entry ) - run_entry = CompRunsAtDB.parse_obj(await result.first()) + run_entry = CompRunsAtDB.model_validate(await result.first()) assert run_entry.result == RunningState.PUBLISHED # let the scheduler kick in await run_comp_scheduler(scheduler) @@ -379,7 +379,7 @@ async def test_misconfigured_pipeline_is_not_scheduled( & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}") ) # there is only one entry ) - run_entry = CompRunsAtDB.parse_obj(await result.first()) + run_entry = CompRunsAtDB.model_validate(await result.first()) assert run_entry.result == RunningState.ABORTED assert run_entry.metadata == run_metadata @@ -756,7 +756,7 @@ async def _return_1st_task_success(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.side_effect = _return_1st_task_success async def _return_random_task_result(job_id) -> TaskOutputData: - return TaskOutputData.parse_obj({"out_1": None, "out_2": 45}) + return TaskOutputData.model_validate({"out_1": None, "out_2": 45}) mocked_dask_client.get_task_result.side_effect = _return_random_task_result await run_comp_scheduler(scheduler) @@ -1175,7 +1175,7 @@ class RebootState: pytest.param( RebootState( dask_task_status=DaskClientTaskState.SUCCESS, - task_result=TaskOutputData.parse_obj({"whatever_output": 123}), + task_result=TaskOutputData.model_validate({"whatever_output": 123}), expected_task_state_group1=RunningState.SUCCESS, expected_task_progress_group1=1, expected_task_state_group2=RunningState.SUCCESS, diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py 
b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 3be8d77c6f5..0ccb2ca5c86 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -330,7 +330,7 @@ def service_name() -> str: @pytest.fixture( params=[ - SimcoreServiceLabels.parse_obj(example) + SimcoreServiceLabels.model_validate(example) for example in SimcoreServiceLabels.model_config["json_schema_extra"][ "examples" ] @@ -397,7 +397,7 @@ def test_settings__valid_network_names( items["SIMCORE_SERVICES_NETWORK_NAME"] = simcore_services_network_name # validate network names - DynamicServicesSchedulerSettings.parse_obj(items) + DynamicServicesSchedulerSettings.model_validate(items) async def test_failed_docker_client_request(docker_swarm: None): diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index 66b2022d7c0..907ba2776bd 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -49,7 +49,7 @@ @pytest.fixture def mock_s3_settings() -> S3Settings: - return S3Settings.parse_obj( + return S3Settings.model_validate( S3Settings.model_config["json_schema_extra"]["examples"][0] ) @@ -117,14 +117,14 @@ def swarm_network_id() -> str: @pytest.fixture def simcore_service_labels() -> SimcoreServiceLabels: # overwrites global fixture - return SimcoreServiceLabels.parse_obj( + return SimcoreServiceLabels.model_validate( SimcoreServiceLabels.model_config["json_schema_extra"]["examples"][2] ) @pytest.fixture def hardware_info() -> HardwareInfo: - return HardwareInfo.parse_obj( + return HardwareInfo.model_validate( 
HardwareInfo.model_config["json_schema_extra"]["examples"][0] ) @@ -141,7 +141,7 @@ def expected_dynamic_sidecar_spec( return { "endpoint_spec": {}, "labels": { - "io.simcore.scheduler-data": SchedulerData.parse_obj( + "io.simcore.scheduler-data": SchedulerData.model_validate( { "compose_spec": '{"version": "2.3", "services": {"rt-web": {"image": ' '"${SIMCORE_REGISTRY}/simcore/services/dynamic/sim4life:${SERVICE_VERSION}", ' @@ -443,7 +443,7 @@ async def test_get_dynamic_proxy_spec( == minimal_app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR ) - expected_dynamic_sidecar_spec_model = AioDockerServiceSpec.parse_obj( + expected_dynamic_sidecar_spec_model = AioDockerServiceSpec.model_validate( expected_dynamic_sidecar_spec ) assert expected_dynamic_sidecar_spec_model.TaskTemplate @@ -570,7 +570,7 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( ) assert dynamic_sidecar_spec dynamic_sidecar_spec_dict = dynamic_sidecar_spec.dict() - expected_dynamic_sidecar_spec_dict = AioDockerServiceSpec.parse_obj( + expected_dynamic_sidecar_spec_dict = AioDockerServiceSpec.model_validate( expected_dynamic_sidecar_spec ).dict() # ensure some entries are sorted the same to prevent flakyness @@ -599,7 +599,7 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( ) assert user_service_specs assert "sidecar" in user_service_specs - user_aiodocker_service_spec = AioDockerServiceSpec.parse_obj( + user_aiodocker_service_spec = AioDockerServiceSpec.model_validate( user_service_specs["sidecar"] ) assert user_aiodocker_service_spec diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py index e6aa8de204b..977828e4753 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py @@ -196,7 +196,7 @@ def generate_simcore_file_link() -> dict[str, Any]: path=create_simcore_file_id( faker.uuid4(), 
faker.uuid4(), faker.file_name() ), - ).dict(by_alias=True, exclude_unset=True) + ).model_dump(by_alias=True, exclude_unset=True) TYPE_TO_FAKE_CALLABLE_MAP = { "number": faker.pyfloat, @@ -316,7 +316,7 @@ async def test_compute_input_data( sleeper_task.node_id, faker.file_name(), ), - ).dict(by_alias=True, exclude_unset=True) + ).model_dump(by_alias=True, exclude_unset=True) if value_type["type"] == "data:*/*" else fake_io_data[key] ) @@ -446,7 +446,7 @@ async def test_clean_task_output_and_log_files_if_invalid( path=create_simcore_file_id( published_project.project.uuid, sleeper_task.node_id, faker.file_name() ), - ).dict(by_alias=True, exclude_unset=True) + ).model_dump(by_alias=True, exclude_unset=True) for key, value_type in fake_io_schema.items() if value_type["type"] == "data:*/*" } From 510fb0ac4ac2f2cf1b4e411670f57b047f006278 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 5 Nov 2024 22:22:59 +0100 Subject: [PATCH 011/121] fix deprecated --- .../api/routes/dynamic_services.py | 2 +- services/director-v2/tests/conftest.py | 4 ++-- .../tests/unit/with_dbs/test_api_route_clusters.py | 6 +++--- .../unit/with_dbs/test_api_route_computations_tasks.py | 2 +- .../tests/unit/with_dbs/test_api_route_dynamic_services.py | 4 ++-- .../with_dbs/test_modules_comp_scheduler_dask_scheduler.py | 4 ++-- .../with_dbs/test_modules_dynamic_sidecar_docker_api.py | 2 +- 7 files changed, 12 insertions(+), 12 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py index 43fcf83e23d..e953c0f7d75 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py @@ -281,7 +281,7 @@ async def service_retrieve_data_on_ports( response = await services_client.request( "POST", f"{service_base_url}/retrieve", - 
content=retrieve_settings.json(by_alias=True), + content=retrieve_settings.model_dump_json(by_alias=True), timeout=timeout, ) diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index 0ee408a624a..d04ff9134aa 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -198,7 +198,7 @@ def mock_env( async def client(mock_env: EnvVarsDict) -> AsyncIterator[TestClient]: settings = AppSettings.create_from_envs() app = init_app(settings) - print("Application settings\n", settings.json(indent=2)) + print("Application settings\n", settings.model_dump_json(indent=2)) # NOTE: this way we ensure the events are run in the application # since it starts the app on a test server with TestClient(app, raise_server_exceptions=True) as test_client: @@ -209,7 +209,7 @@ async def client(mock_env: EnvVarsDict) -> AsyncIterator[TestClient]: async def initialized_app(mock_env: EnvVarsDict) -> AsyncIterable[FastAPI]: settings = AppSettings.create_from_envs() app = init_app(settings) - print("Application settings\n", settings.json(indent=2)) + print("Application settings\n", settings.model_dump_json(indent=2)) async with LifespanManager(app): yield app diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py index 2ab5f347879..149bd282b78 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py @@ -311,7 +311,7 @@ async def test_create_cluster( response = await async_client.post( create_cluster_url, json=json.loads( - cluster_data.json( + cluster_data.model_dump_json( by_alias=True, exclude_unset=True, encoder=create_json_encoder_wo_secrets(ClusterCreate), @@ -355,7 +355,7 @@ async def test_update_own_cluster( response = await async_client.patch( f"/v2/clusters/15615165165165?user_id={user_1['id']}", json=json.loads( - 
ClusterPatch().json( + ClusterPatch().model_dump_json( **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) ) ), @@ -377,7 +377,7 @@ async def test_update_own_cluster( response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", json=json.loads( - ClusterPatch().json( + ClusterPatch().model_dump_json( **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) ) ), diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py index c908f3daef0..65741f7f526 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py @@ -69,7 +69,7 @@ def _get_app(async_client: httpx.AsyncClient) -> FastAPI: settings: AppSettings = app.state.settings assert settings - print(settings.json(indent=1)) + print(settings.model_dump_json(indent=1)) return async_client diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py index 2afb2b2070a..58557b66801 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py @@ -343,7 +343,7 @@ def test_create_dynamic_services( response = client.post( "/v2/dynamic_services", headers=dynamic_sidecar_headers, - json=json.loads(post_data.json()), + json=json.loads(post_data.model_dump_json()), follow_redirects=False, ) assert ( @@ -555,7 +555,7 @@ def test_delete_service_waiting_for_manual_intervention( response = client.post( "/v2/dynamic_services", headers=dynamic_sidecar_headers, - json=json.loads(post_data.json()), + json=json.loads(post_data.model_dump_json()), ) assert ( response.status_code == exp_status_code diff --git 
a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index ae8b6326297..32117e790c0 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -598,7 +598,7 @@ async def _trigger_progress_event( ), ) await cast(DaskScheduler, scheduler)._task_progress_change_handler( # noqa: SLF001 - event.json() + event.model_dump_json() ) @@ -1021,7 +1021,7 @@ async def test_task_progress_triggers( await cast( DaskScheduler, scheduler )._task_progress_change_handler( # noqa: SLF001 - progress_event.json() + progress_event.model_dump_json() ) # NOTE: not sure whether it should switch to STARTED.. it would make sense await _assert_comp_tasks_db( diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 0ccb2ca5c86..7b00b23ebee 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -195,7 +195,7 @@ def dynamic_sidecar_service_spec( f"{to_simcore_runtime_docker_label_key('service_port')}": "80", f"{to_simcore_runtime_docker_label_key('service_key')}": "simcore/services/dynamic/3dviewer", f"{to_simcore_runtime_docker_label_key('service_version')}": "2.4.5", - DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL: scheduler_data_from_http_request.json(), + DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL: scheduler_data_from_http_request.model_dump_json(), }, } From 392bcd4c0c273cfe32a0100e6f5ba1c32128edc7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 5 Nov 2024 22:30:29 +0100 Subject: [PATCH 012/121] fix field access --- .../modules/dynamic_sidecar/docker_states.py | 6 +++--- 1 file 
changed, 3 insertions(+), 3 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py index 5a503f8b8a8..afd44dc0f59 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py @@ -74,10 +74,10 @@ def extract_task_state(task_status: dict[str, str]) -> tuple[ServiceState, str]: def _extract_container_status( container_state: ContainerState, ) -> tuple[ServiceState, ServiceMessage]: - assert container_state.Status # nosec + assert container_state.status # nosec return ( - _CONTAINER_STATE_TO_SERVICE_STATE[container_state.Status], - container_state.Error if container_state.Error else "", + _CONTAINER_STATE_TO_SERVICE_STATE[container_state.status], + container_state.error if container_state.error else "", ) From 6f2e8dbf4f192296347dee3227287ed2adf280f8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 5 Nov 2024 22:35:31 +0100 Subject: [PATCH 013/121] fix field auto_default prop --- .../core/dynamic_services_settings/__init__.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py index b4abd4f5b6e..c3ed002edd6 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py @@ -13,22 +13,22 @@ class DynamicServicesSettings(BaseCustomSettings): default=True, description="Enables/Disables the dynamic_sidecar submodule" ) - DYNAMIC_SIDECAR: DynamicSidecarSettings = Field(auto_default_from_env=True) + 
DYNAMIC_SIDECAR: DynamicSidecarSettings = Field(json_schema_extra={"auto_default_from_env": True}) DYNAMIC_SCHEDULER: DynamicServicesSchedulerSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_PROXY_SETTINGS: DynamicSidecarProxySettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_EGRESS_PROXY_SETTINGS: EgressProxySettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) DYNAMIC_SIDECAR_PLACEMENT_SETTINGS: PlacementSettings = Field( - auto_default_from_env=True + json_schema_extra={"auto_default_from_env": True} ) - WEBSERVER_SETTINGS: WebServerSettings = Field(auto_default_from_env=True) + WEBSERVER_SETTINGS: WebServerSettings = Field(json_schema_extra={"auto_default_from_env": True}) From 8618098ab5828d8b0d4c45d7e501314e5b5f0f16 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 5 Nov 2024 22:39:55 +0100 Subject: [PATCH 014/121] continue fixing --- .../cli/_close_and_save_service.py | 2 +- .../models/dynamic_services_scheduler.py | 8 ++++---- .../modules/dynamic_sidecar/errors.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py b/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py index 2f04477f06a..fb8f70bf62f 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py @@ -106,7 +106,7 @@ async def async_close_and_save_service( client = Client( app=app, async_client=thin_dv2_localhost_client.client, - base_url=TypeAdapter(AnyHttpUrl).validate_python(thin_dv2_localhost_client.BASE_ADDRESS), + base_url=f"{TypeAdapter(AnyHttpUrl).validate_python(thin_dv2_localhost_client.BASE_ADDRESS)}", ) if not skip_container_removal: diff 
--git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index 280710fda3d..901a8cc5f26 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -127,8 +127,8 @@ class DockerContainerInspect(BaseModel): @cached_property def status(self) -> DockerStatus: - assert self.container_state.Status # nosec - result: DockerStatus = self.container_state.Status + assert self.container_state.status # nosec + result: DockerStatus = self.container_state.status return result @classmethod @@ -198,7 +198,7 @@ class DynamicSidecar(BaseModel): is_ready: bool = Field( default=False, - scription=( + description=( "is True while the health check on the dynamic-sidecar is responding. " "Meaning that the dynamic-sidecar is reachable and can accept requests" ), @@ -220,7 +220,7 @@ def compose_spec_submitted(self) -> bool: containers_inspect: list[DockerContainerInspect] = Field( [], - scription="docker inspect results from all the container ran at regular intervals", + description="docker inspect results from all the container ran at regular intervals", ) was_dynamic_sidecar_started: bool = False diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py index ecb86e9a6aa..62c38278e1c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py @@ -1,6 +1,6 @@ from aiodocker.exceptions import DockerError +from common_library.errors_classes import OsparcErrorMixin from models_library.projects_nodes_io import NodeID -from pydantic.errors import PydanticErrorMixin 
from ...core.errors import DirectorError @@ -39,6 +39,6 @@ class LegacyServiceIsNotSupportedError(DirectorError): """This API is not implemented by the director-v0""" -class UnexpectedContainerStatusError(PydanticErrorMixin, DynamicSidecarError): +class UnexpectedContainerStatusError(OsparcErrorMixin, DynamicSidecarError): code = "dynamic_sidecar.container_status" msg_template = "Unexpected status from containers: {containers_with_error}" From d8dfa69a559afa5459ee055d3638f31ad5154caf Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 09:30:00 +0100 Subject: [PATCH 015/121] remove pydantic-settings constraint --- services/director-v2/requirements/constraints.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/services/director-v2/requirements/constraints.txt b/services/director-v2/requirements/constraints.txt index c02043ff33a..e69de29bb2d 100644 --- a/services/director-v2/requirements/constraints.txt +++ b/services/director-v2/requirements/constraints.txt @@ -1,2 +0,0 @@ -# GCR remove me -pydantic-settings<2.6 \ No newline at end of file From ddafb2ff2e3ed47d66b14f9b837a588e027da62c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 10:25:54 +0100 Subject: [PATCH 016/121] fix settings --- .../src/simcore_service_director_v2/core/settings.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index 57f54ce9f15..c3010401679 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -29,7 +29,7 @@ field_validator, ) from servicelib.logging_utils_filtering import LoggerName, MessageSubstring -from settings_library.base import BaseCustomSettings +from settings_library.application import BaseApplicationSettings from settings_library.catalog import CatalogSettings from 
settings_library.docker_registry import RegistrySettings from settings_library.http_client_request import ClientRequestSettings @@ -52,7 +52,7 @@ from .dynamic_services_settings import DynamicServicesSettings -class DirectorV0Settings(BaseCustomSettings): +class DirectorV0Settings(BaseApplicationSettings): DIRECTOR_V0_ENABLED: bool = True DIRECTOR_HOST: str = "director" @@ -72,7 +72,7 @@ def endpoint(self) -> str: return url -class ComputationalBackendSettings(BaseCustomSettings): +class ComputationalBackendSettings(BaseApplicationSettings): COMPUTATIONAL_BACKEND_ENABLED: bool = Field( default=True, ) @@ -123,7 +123,7 @@ def _empty_auth_is_none(cls, v): return v -class AppSettings(BaseCustomSettings, MixinLoggingSettings): +class AppSettings(BaseApplicationSettings, MixinLoggingSettings): # docker environs SC_BOOT_MODE: BootModeEnum SC_BOOT_TARGET: BuildTargetEnum | None From 95c714f24c27a26fe67cce70dc87145f33e082a9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 10:38:01 +0100 Subject: [PATCH 017/121] continue fix --- .../simcore_service_director_v2/models/comp_tasks.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index dca5279087f..5c36752e3fa 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -58,7 +58,7 @@ class Image(BaseModel): @field_validator("node_requirements", mode="before") @classmethod - def migrate_from_requirements(cls, v, info: ValidationInfo): + def _migrate_from_requirements(cls, v, info: ValidationInfo): if v is None: # NOTE: 'node_requirements' field's default=None although is NOT declared as nullable. 
# Then this validator with `pre=True, always=True` is used to create a default @@ -147,12 +147,12 @@ class CompTaskAtDB(BaseModel): created: datetime.datetime modified: datetime.datetime # Additional information about price and hardware (ex. AWS EC2 instance type) - pricing_info: dict | None = None + pricing_info: dict | None hardware_info: HardwareInfo @field_validator("state", mode="before") @classmethod - def convert_state_from_state_type_enum_if_needed(cls, v): + def _convert_state_from_state_type_enum_if_needed(cls, v): if isinstance(v, str): # try to convert to a StateType, if it fails the validations will continue # and pydantic will try to convert it to a RunninState later on @@ -164,14 +164,14 @@ def convert_state_from_state_type_enum_if_needed(cls, v): @field_validator("start", "end", "submit") @classmethod - def ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: + def _ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: if v is not None and v.tzinfo is None: v = v.replace(tzinfo=datetime.UTC) return v @field_validator("hardware_info", mode="before") @classmethod - def backward_compatible_null_value(cls, v: HardwareInfo | None) -> HardwareInfo: + def _backward_compatible_null_value(cls, v: HardwareInfo | None) -> HardwareInfo: if v is None: return HardwareInfo(aws_ec2_instances=[]) return v From bb10df176b641e9a6311616bde251ed617f31312 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 10:59:07 +0100 Subject: [PATCH 018/121] fix deprecated --- .../modules/db/repositories/comp_tasks/_core.py | 10 +++++----- .../modules/db/repositories/projects.py | 2 +- services/director-v2/tests/unit/with_dbs/conftest.py | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py index 
dabb45dfb0f..aa72e996d7f 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py @@ -44,7 +44,7 @@ async def get_task(self, project_id: ProjectID, node_id: NodeID) -> CompTaskAtDB row = await result.fetchone() if not row: raise ComputationalTaskNotFoundError(node_id=node_id) - return CompTaskAtDB.from_orm(row) + return CompTaskAtDB.model_validate(row) async def list_tasks( self, @@ -55,7 +55,7 @@ async def list_tasks( async for row in conn.execute( sa.select(comp_tasks).where(comp_tasks.c.project_id == f"{project_id}") ): - task_db = CompTaskAtDB.from_orm(row) + task_db = CompTaskAtDB.model_validate(row) tasks.append(task_db) return tasks @@ -72,7 +72,7 @@ async def list_computational_tasks( & (comp_tasks.c.node_class == NodeClass.COMPUTATIONAL) ) ): - task_db = CompTaskAtDB.from_orm(row) + task_db = CompTaskAtDB.model_validate(row) tasks.append(task_db) return tasks @@ -166,7 +166,7 @@ async def upsert_tasks_from_project( result = await conn.execute(on_update_stmt) row = await result.fetchone() assert row # nosec - inserted_comp_tasks_db.append(CompTaskAtDB.from_orm(row)) + inserted_comp_tasks_db.append(CompTaskAtDB.model_validate(row)) _logger.debug( "inserted the following tasks in comp_tasks: %s", f"{inserted_comp_tasks_db=}", @@ -193,7 +193,7 @@ async def _update_task( ) row = await result.fetchone() assert row # nosec - return CompTaskAtDB.from_orm(row) + return CompTaskAtDB.model_validate(row) async def mark_project_published_waiting_for_cluster_tasks_as_aborted( self, project_id: ProjectID diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py index 856c0ec3650..5f5fe5263ff 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py @@ -23,7 +23,7 @@ async def get_project(self, project_id: ProjectID) -> ProjectAtDB: ).first() if not row: raise ProjectNotFoundError(project_id) - return ProjectAtDB.from_orm(row) + return ProjectAtDB.model_validate(row) async def is_node_present_in_workbench( self, project_id: ProjectID, node_uuid: NodeID diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 126c1e3c2f5..78a17e79777 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -59,7 +59,7 @@ def creator(**pipeline_kwargs) -> CompPipelineAtDB: ) assert result - new_pipeline = CompPipelineAtDB.from_orm(result.first()) + new_pipeline = CompPipelineAtDB.model_validate(result.first()) created_pipeline_ids.append(f"{new_pipeline.project_id}") return new_pipeline @@ -134,7 +134,7 @@ def creator( .values(**task_config) .returning(sa.literal_column("*")) ) - new_task = CompTaskAtDB.from_orm(result.first()) + new_task = CompTaskAtDB.model_validate(result.first()) created_tasks.append(new_task) created_task_ids.extend([t.task_id for t in created_tasks if t.task_id]) return created_tasks @@ -205,7 +205,7 @@ def creator( .values(**jsonable_encoder(run_config)) .returning(sa.literal_column("*")) ) - new_run = CompRunsAtDB.from_orm(result.first()) + new_run = CompRunsAtDB.model_validate(result.first()) created_run_ids.append(new_run.run_id) return new_run From 28dd6fd8ef76f4cbeee1f4259a3eb48fd5575b9f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 11:01:12 +0100 Subject: [PATCH 019/121] fix deprecated --- .../modules/db/repositories/comp_pipelines.py | 2 +- .../modules/db/repositories/comp_runs.py | 8 ++++---- .../modules/db/repositories/projects_networks.py | 2 +- 3 
files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py index da273c79159..37129141f6d 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py @@ -26,7 +26,7 @@ async def get_pipeline(self, project_id: ProjectID) -> CompPipelineAtDB: row: RowProxy | None = await result.fetchone() if not row: raise PipelineNotFoundError(str(project_id)) - return CompPipelineAtDB.from_orm(row) + return CompPipelineAtDB.model_validate(row) async def upsert_pipeline( self, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py index 4f9a8e42b53..289a0063649 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py @@ -51,7 +51,7 @@ async def get( row: RowProxy | None = await result.first() if not row: raise ComputationalRunNotFoundError - return CompRunsAtDB.from_orm(row) + return CompRunsAtDB.model_validate(row) async def list( self, filter_by_state: set[RunningState] | None = None @@ -70,7 +70,7 @@ async def list( ) ) ): - runs_in_db.append(CompRunsAtDB.from_orm(row)) + runs_in_db.append(CompRunsAtDB.model_validate(row)) return list(runs_in_db) async def create( @@ -114,7 +114,7 @@ async def create( .returning(literal_column("*")) ) row = await result.first() - return CompRunsAtDB.from_orm(row) + return CompRunsAtDB.model_validate(row) except ForeignKeyViolation as exc: raise ClusterNotFoundError(cluster_id=cluster_id) from exc @@ -133,7 +133,7 @@ async def update( 
.returning(literal_column("*")) ) row = await result.first() - return CompRunsAtDB.from_orm(row) if row else None + return CompRunsAtDB.model_validate(row) if row else None async def set_run_result( self, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py index 172c577ee56..c2233030622 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py @@ -23,7 +23,7 @@ async def get_projects_networks(self, project_id: ProjectID) -> ProjectsNetworks ).first() if not row: raise ProjectNetworkNotFoundError(project_id) - return ProjectsNetworks.from_orm(row) + return ProjectsNetworks.model_validate(row) async def upsert_projects_networks( self, project_id: ProjectID, networks_with_aliases: NetworksWithAliases From ff2781190ce20a17add1bde73e33d013edef646d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 11:07:06 +0100 Subject: [PATCH 020/121] fix url --- .../src/simcore_service_director_v2/core/settings.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index c3010401679..ffd5332d642 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -63,11 +63,13 @@ class DirectorV0Settings(BaseApplicationSettings): @cached_property def endpoint(self) -> str: - url: str = AnyHttpUrl.build( - scheme="http", - host=self.DIRECTOR_HOST, - port=self.DIRECTOR_PORT, - path=f"/{self.DIRECTOR_V0_VTAG}", + url: str = str( + AnyHttpUrl.build( + scheme='http', + host=self.DIRECTOR_HOST, + 
port=self.DIRECTOR_PORT, + path=f"/{self.DIRECTOR_V0_VTAG}", + ) ) return url From 75ac5c88ac877a1f0454f5128c83a1540d181c50 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 11:08:01 +0100 Subject: [PATCH 021/121] fix root field --- .../modules/dynamic_sidecar/docker_service_specs/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py index f7c842747f4..7ffd87a4561 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py @@ -423,7 +423,7 @@ def _assemble_env_vars_for_boot_options( env_vars.append(f"{env_var_name}={value}") return SimcoreServiceSettingsLabel( - __root__=[ + root=[ SimcoreServiceSettingLabelEntry( name="env", type="string", value=list(env_vars) ) From edbd1c63600e347c6cecc19fdbaa197525dacc5b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 11:24:52 +0100 Subject: [PATCH 022/121] continue fixing --- .../simcore_service_director_v2/api/errors/http_error.py | 2 +- .../src/simcore_service_director_v2/models/comp_tasks.py | 4 ++-- .../modules/dynamic_sidecar/api_client/_public.py | 2 +- .../tests/unit/test_utils_distributed_identifier.py | 6 +++--- .../unit/with_dbs/test_api_route_computations_tasks.py | 4 ++-- .../with_dbs/test_modules_comp_scheduler_dask_scheduler.py | 4 ++-- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py b/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py index da51e6f9e26..2026774d7a5 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py +++ b/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py @@ -14,7 +14,7 @@ async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse: def make_http_error_handler_for_exception( status_code: int, exception_cls: type[BaseException] -) -> Callable[[Request, type[BaseException]], Awaitable[JSONResponse]]: +) -> Callable[[Request, Exception], Awaitable[JSONResponse]]: """ Produces a handler for BaseException-type exceptions which converts them into an error JSON response with a given status code diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 5c36752e3fa..6af64d2d84f 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -230,8 +230,8 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: "state": "NOT_STARTED", "progress": 0.44, "last_heartbeat": None, - "created": "2022-05-20 13:28:31.139+00", - "modified": "2023-06-23 15:58:32.833081+00", + "created": "2022-05-20T13:28:31.139", + "modified": "2023-06-23T15:58:32.83308", "pricing_info": { "pricing_plan_id": 1, "pricing_unit_id": 1, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py index e07a8b6cc80..3ee32e3a9b0 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py @@ -293,7 +293,7 @@ def _get_client(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> Client: return Client( app=self._app, async_client=self._async_client, - 
base_url=dynamic_sidecar_endpoint, + base_url=f"{dynamic_sidecar_endpoint}", ) async def _await_for_result( diff --git a/services/director-v2/tests/unit/test_utils_distributed_identifier.py b/services/director-v2/tests/unit/test_utils_distributed_identifier.py index ce200feef97..a9fd8a42a0a 100644 --- a/services/director-v2/tests/unit/test_utils_distributed_identifier.py +++ b/services/director-v2/tests/unit/test_utils_distributed_identifier.py @@ -10,7 +10,7 @@ from uuid import UUID, uuid4 import pytest -from pydantic import BaseModel, NonNegativeInt, StrBytes +from pydantic import BaseModel, NonNegativeInt from pytest_mock import MockerFixture from servicelib.redis import RedisClientSDK from servicelib.utils import logged_gather @@ -132,8 +132,8 @@ def _serialize_identifier(cls, identifier: UserDefinedID) -> str: return f"{identifier._id}" # noqa: SLF001 @classmethod - def _deserialize_cleanup_context(cls, raw: StrBytes) -> AnEmptyTextCleanupContext: - return AnEmptyTextCleanupContext.parse_raw(raw) + def _deserialize_cleanup_context(cls, raw: str | bytes) -> AnEmptyTextCleanupContext: + return AnEmptyTextCleanupContext.model_validate_json(raw) @classmethod def _serialize_cleanup_context( diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py index 65741f7f526..3c35009c3db 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py @@ -9,6 +9,7 @@ from uuid import uuid4 import httpx +from pydantic import TypeAdapter import pytest from faker import Faker from fastapi import FastAPI, status @@ -21,7 +22,6 @@ from models_library.projects import ProjectAtDB, ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from pydantic import parse_raw_as from pytest_simcore.helpers.monkeypatch_envs 
import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director_v2.core.settings import AppSettings @@ -162,7 +162,7 @@ async def test_get_all_tasks_log_files( # test expected response according to OAS! assert resp.status_code == status.HTTP_200_OK - log_files = parse_raw_as(list[TaskLogFileGet], resp.text) + log_files = TypeAdapter(list[TaskLogFileGet]).validate_json(resp.text) assert log_files assert all(l.download_link for l in log_files) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index 32117e790c0..1d44e206929 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -41,7 +41,7 @@ RabbitResourceTrackingStoppedMessage, ) from models_library.users import UserID -from pydantic import TypeAdapter, parse_raw_as +from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq import RabbitMQClient @@ -732,7 +732,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta assert messages[0].service_uuid == exp_started_task.node_id def _parser(x) -> RabbitResourceTrackingMessages: - return parse_raw_as(RabbitResourceTrackingMessages, x) + return TypeAdapter(RabbitResourceTrackingMessages).validate_json(x) messages = await _assert_message_received( resource_tracking_rabbit_client_parser, From c091c4b14584091a8867667a74ecfd5aa8c94ac8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 11:33:44 +0100 Subject: [PATCH 023/121] fix deprecated --- .../modules/comp_scheduler/dask_scheduler.py | 4 ++-- .../modules/rabbitmq.py | 2 +- ...dels_schemas_dynamic_services_scheduler.py | 4 ++-- 
...test_schemas_dynamic_services_scheduler.py | 4 ++-- .../test_api_route_computations_tasks.py | 2 +- ...t_modules_comp_scheduler_dask_scheduler.py | 24 +++++++++---------- ...test_modules_dynamic_sidecar_docker_api.py | 2 +- 7 files changed, 21 insertions(+), 21 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/dask_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/dask_scheduler.py index 3890ee1f7ad..5c36b8d6b7b 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/dask_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/dask_scheduler.py @@ -322,7 +322,7 @@ async def _process_task_result( async def _task_progress_change_handler(self, event: str) -> None: with log_catch(_logger, reraise=False): - task_progress_event = TaskProgressEvent.parse_raw(event) + task_progress_event = TaskProgressEvent.model_validate_json(event) _logger.debug("received task progress update: %s", task_progress_event) user_id = task_progress_event.task_owner.user_id project_id = task_progress_event.task_owner.project_id @@ -353,7 +353,7 @@ async def _task_progress_change_handler(self, event: str) -> None: async def _task_log_change_handler(self, event: str) -> None: with log_catch(_logger, reraise=False): - task_log_event = TaskLogEvent.parse_raw(event) + task_log_event = TaskLogEvent.model_validate_json(event) _logger.debug("received task log update: %s", task_log_event) await publish_service_log( self.rabbitmq_client, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/rabbitmq.py b/services/director-v2/src/simcore_service_director_v2/modules/rabbitmq.py index 2563a4133d7..dcda51ad0e5 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/rabbitmq.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/rabbitmq.py @@ -22,7 +22,7 @@ async def 
handler_out_of_credits(app: FastAPI, data: bytes) -> bool: - message = WalletCreditsLimitReachedMessage.parse_raw(data) + message = WalletCreditsLimitReachedMessage.model_validate_json(data) scheduler: "DynamicSidecarsScheduler" = app.state.dynamic_sidecar_scheduler # type: ignore[name-defined] # noqa: F821 settings: AppSettings = app.state.settings diff --git a/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py b/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py index 16c249b3470..dd6c6adf2d8 100644 --- a/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py +++ b/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py @@ -25,8 +25,8 @@ def test_regression_as_label_data(scheduler_data: SchedulerData) -> None: # using pydantic's internals label_data = scheduler_data.as_label_data() - parsed_json_encoded = SchedulerData.parse_raw(json_encoded) - parsed_label_data = SchedulerData.parse_raw(label_data) + parsed_json_encoded = SchedulerData.model_validate_json(json_encoded) + parsed_label_data = SchedulerData.model_validate_json(label_data) assert parsed_json_encoded == parsed_label_data diff --git a/services/director-v2/tests/unit/test_schemas_dynamic_services_scheduler.py b/services/director-v2/tests/unit/test_schemas_dynamic_services_scheduler.py index 8d58d96f675..6347ebab5f4 100644 --- a/services/director-v2/tests/unit/test_schemas_dynamic_services_scheduler.py +++ b/services/director-v2/tests/unit/test_schemas_dynamic_services_scheduler.py @@ -40,11 +40,11 @@ def assert_copy_has_changes(original: SchedulerData) -> Iterator[SchedulerData]: async def test_parse_saved_fake_scheduler_data(fake_scheduler_data: str) -> None: - assert SchedulerData.parse_raw(fake_scheduler_data) + assert SchedulerData.model_validate_json(fake_scheduler_data) def test_nested_compare(fake_scheduler_data: str) -> None: - scheduler_data = 
SchedulerData.parse_raw(fake_scheduler_data) + scheduler_data = SchedulerData.model_validate_json(fake_scheduler_data) with assert_copy_has_changes(scheduler_data) as to_change: to_change.paths_mapping.inputs_path = Path("/tmp") diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py index 3c35009c3db..10bd1ba3a2f 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations_tasks.py @@ -180,7 +180,7 @@ async def test_get_task_logs_file( ) assert resp.status_code == status.HTTP_200_OK - log_file = TaskLogFileGet.parse_raw(resp.text) + log_file = TaskLogFileGet.model_validate_json(resp.text) assert log_file.download_link diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index 1d44e206929..410a086702d 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -726,7 +726,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.reset_mock() mocked_dask_client.get_task_result.assert_not_called() messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.parse_raw + instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.model_validate_json ) assert messages[0].metrics == "service_started" assert messages[0].service_uuid == exp_started_task.node_id @@ -737,7 +737,7 @@ def _parser(x) -> RabbitResourceTrackingMessages: messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStartedMessage.parse_raw, + 
 RabbitResourceTrackingStartedMessage.model_validate_json, ) assert messages[0].node_id == exp_started_task.node_id @@ -769,14 +769,14 @@ async def _return_random_task_result(job_id) -> TaskOutputData: expected_progress=1, ) messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.parse_raw + instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.model_validate_json ) assert messages[0].metrics == "service_stopped" assert messages[0].service_uuid == exp_started_task.node_id messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStoppedMessage.parse_raw, + RabbitResourceTrackingStoppedMessage.model_validate_json, ) completed_tasks = [exp_started_task] @@ -871,14 +871,14 @@ async def _return_2nd_task_running(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.reset_mock() mocked_dask_client.get_task_result.assert_not_called() messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.parse_raw + instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.model_validate_json ) assert messages[0].metrics == "service_started" assert messages[0].service_uuid == exp_started_task.node_id messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStartedMessage.parse_raw, + RabbitResourceTrackingStartedMessage.model_validate_json, ) assert messages[0].node_id == exp_started_task.node_id @@ -915,14 +915,14 @@ async def _return_2nd_task_failed(job_ids: list[str]) -> list[DaskClientTaskStat mocked_parse_output_data_fct.assert_not_called() expected_pending_tasks.remove(exp_started_task) messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, 
InstrumentationRabbitMessage.model_validate_json ) assert messages[0].metrics == "service_stopped" assert messages[0].service_uuid == exp_started_task.node_id messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStoppedMessage.parse_raw, + RabbitResourceTrackingStoppedMessage.model_validate_json, ) # ------------------------------------------------------------------------------- @@ -959,7 +959,7 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta ) mocked_dask_client.get_task_result.assert_called_once_with(exp_started_task.job_id) messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 2, InstrumentationRabbitMessage.parse_raw + instrumentation_rabbit_client_parser, 2, InstrumentationRabbitMessage.model_validate_json ) # NOTE: the service was fast and went directly to success assert messages[0].metrics == "service_started" @@ -1328,7 +1328,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingStartedMessage.parse_raw, + RabbitResourceTrackingStartedMessage.model_validate_json, ) assert messages[0].node_id == exp_started_task.node_id @@ -1340,7 +1340,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingHeartbeatMessage.parse_raw, + RabbitResourceTrackingHeartbeatMessage.model_validate_json, ) assert isinstance(messages[0], RabbitResourceTrackingHeartbeatMessage) @@ -1352,7 +1352,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, - RabbitResourceTrackingHeartbeatMessage.parse_raw, + RabbitResourceTrackingHeartbeatMessage.model_validate_json, ) assert 
isinstance(messages[0], RabbitResourceTrackingHeartbeatMessage) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 7b00b23ebee..02facb9df44 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -729,7 +729,7 @@ async def test_update_scheduler_data_label( # fetch stored data in labels service_inspect = await async_docker_client.services.inspect(mock_service) labels = service_inspect["Spec"]["Labels"] - scheduler_data = SchedulerData.parse_raw( + scheduler_data = SchedulerData.model_validate_json( labels[DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL] ) assert scheduler_data == mock_scheduler_data From 7b73bcfca4598e8881d30efa48aa488b94f6a0d2 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 11:34:11 +0100 Subject: [PATCH 024/121] fix deprecated --- .../tests/unit/with_dbs/test_utils_rabbitmq.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py index ccd3f304a0a..0835852f899 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py @@ -129,7 +129,7 @@ async def test_publish_service_started_metrics( task=random.choice(tasks), # noqa: S311 ) await _assert_message_received( - mocked_message_parser, 1, InstrumentationRabbitMessage.parse_raw + mocked_message_parser, 1, InstrumentationRabbitMessage.model_validate_json ) @@ -154,7 +154,7 @@ async def test_publish_service_stopped_metrics( task_final_state=random.choice(list(RunningState)), # noqa: S311 ) await _assert_message_received( - mocked_message_parser, 1, InstrumentationRabbitMessage.parse_raw + 
mocked_message_parser, 1, InstrumentationRabbitMessage.model_validate_json ) @@ -207,7 +207,7 @@ async def test_publish_service_resource_tracking_started( ) after_publication_time = datetime.datetime.now(datetime.timezone.utc) received_messages = await _assert_message_received( - mocked_message_parser, 1, RabbitResourceTrackingStartedMessage.parse_raw + mocked_message_parser, 1, RabbitResourceTrackingStartedMessage.model_validate_json ) assert isinstance(received_messages[0], RabbitResourceTrackingStartedMessage) assert received_messages[0].service_run_id == random_service_run_id @@ -241,7 +241,7 @@ async def test_publish_service_resource_tracking_stopped( ) after_publication_time = datetime.datetime.now(datetime.timezone.utc) received_messages = await _assert_message_received( - mocked_message_parser, 1, RabbitResourceTrackingStoppedMessage.parse_raw + mocked_message_parser, 1, RabbitResourceTrackingStoppedMessage.model_validate_json ) assert isinstance(received_messages[0], RabbitResourceTrackingStoppedMessage) assert received_messages[0].service_run_id == random_service_run_id @@ -272,7 +272,7 @@ async def test_publish_service_resource_tracking_heartbeat( ) after_publication_time = datetime.datetime.now(datetime.timezone.utc) received_messages = await _assert_message_received( - mocked_message_parser, 1, RabbitResourceTrackingHeartbeatMessage.parse_raw + mocked_message_parser, 1, RabbitResourceTrackingHeartbeatMessage.model_validate_json ) assert isinstance(received_messages[0], RabbitResourceTrackingHeartbeatMessage) assert received_messages[0].service_run_id == random_service_run_id From e0047c4023f5c84693fe775ef7008a30c139d477 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 13:16:54 +0100 Subject: [PATCH 025/121] fix required --- .../src/models_library/api_schemas_directorv2/clusters.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py 
b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py index 7889f68196b..301ea4181ec 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py @@ -126,6 +126,7 @@ class ClusterCreate(BaseCluster): "name": "My awesome cluster", "type": ClusterTypeInModel.ON_PREMISE, "endpoint": "https://registry.osparc-development.fake.dev", + "owner": None, "authentication": { "type": "simple", "username": "someuser", From 405394644318cc1a48140110a2409fb79e73242d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 13:59:55 +0100 Subject: [PATCH 026/121] fix settings --- .../src/simcore_service_director_v2/core/settings.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index ffd5332d642..81f9984eb58 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -30,6 +30,7 @@ ) from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings +from settings_library.base import BaseCustomSettings from settings_library.catalog import CatalogSettings from settings_library.docker_registry import RegistrySettings from settings_library.http_client_request import ClientRequestSettings @@ -52,7 +53,7 @@ from .dynamic_services_settings import DynamicServicesSettings -class DirectorV0Settings(BaseApplicationSettings): +class DirectorV0Settings(BaseCustomSettings): DIRECTOR_V0_ENABLED: bool = True DIRECTOR_HOST: str = "director" @@ -74,7 +75,7 @@ def endpoint(self) -> str: return url -class ComputationalBackendSettings(BaseApplicationSettings): +class 
ComputationalBackendSettings(BaseCustomSettings): COMPUTATIONAL_BACKEND_ENABLED: bool = Field( default=True, ) @@ -126,10 +127,6 @@ def _empty_auth_is_none(cls, v): class AppSettings(BaseApplicationSettings, MixinLoggingSettings): - # docker environs - SC_BOOT_MODE: BootModeEnum - SC_BOOT_TARGET: BuildTargetEnum | None - LOG_LEVEL: LogLevel = Field( LogLevel.INFO.value, validation_alias=AliasChoices("DIRECTOR_V2_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), From 722d6b9fb5573788c1926ece452009d56e438f08 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 16:00:04 +0100 Subject: [PATCH 027/121] fix json --- .../src/models_library/service_settings_labels.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index 0a632a7b70d..511a3e7704b 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -3,7 +3,7 @@ from enum import Enum from functools import cached_property from pathlib import Path -from typing import Any, Literal, TypeAlias +from typing import Annotated, Any, Literal, TypeAlias from common_library.json_serialization import json_dumps from pydantic import ( @@ -267,7 +267,7 @@ class RestartPolicy(str, Enum): class DynamicSidecarServiceLabels(BaseModel): """All "simcore.service.*" labels including keys""" - paths_mapping: Json[PathMappingsLabel] | None = Field( + paths_mapping: Annotated[PathMappingsLabel | None, Json]= Field( None, alias="simcore.service.paths-mapping", description=( @@ -276,7 +276,7 @@ class DynamicSidecarServiceLabels(BaseModel): ), ) - compose_spec: Json[ComposeSpecLabelDict] | None = Field( + compose_spec: Annotated[ComposeSpecLabelDict | None, Json] = Field( None, alias="simcore.service.compose-spec", description=( @@ -317,21 +317,19 @@ class 
DynamicSidecarServiceLabels(BaseModel): ), ) - containers_allowed_outgoing_permit_list: None | ( - Json[dict[str, list[NATRule]]] - ) = Field( + containers_allowed_outgoing_permit_list: Annotated[None | dict[str, list[NATRule]], Json] = Field( None, alias="simcore.service.containers-allowed-outgoing-permit-list", description="allow internet access to certain domain names and ports per container", ) - containers_allowed_outgoing_internet: Json[set[str]] | None = Field( + containers_allowed_outgoing_internet: Annotated[set[str] | None, Json] = Field( None, alias="simcore.service.containers-allowed-outgoing-internet", description="allow complete internet access to containers in here", ) - callbacks_mapping: Json[CallbacksMapping] | None = Field( + callbacks_mapping: Annotated[CallbacksMapping | None, Json] = Field( default_factory=CallbacksMapping, # type: ignore[arg-type] # this one ANE I am not sure about alias="simcore.service.callbacks-mapping", description="exposes callbacks from user services to the sidecar", From 9293a26d86ef8d32073e3e0b64277182b2832d68 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 16:30:16 +0100 Subject: [PATCH 028/121] fix pricing_info --- services/director-v2/tests/mocks/fake_task.json | 1 + 1 file changed, 1 insertion(+) diff --git a/services/director-v2/tests/mocks/fake_task.json b/services/director-v2/tests/mocks/fake_task.json index b26ebfa9ba5..eb8cc71db76 100644 --- a/services/director-v2/tests/mocks/fake_task.json +++ b/services/director-v2/tests/mocks/fake_task.json @@ -61,6 +61,7 @@ "created": "1961-07-06T11:24:30.877Z", "modified": "2008-03-24T07:02:09.279Z", "last_heartbeat": null, + "pricing_info": null, "hardware_info": { "aws_ec2_instances": [] } From 152148c0479cb6fb10e0d39135c082b2c8033895 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 6 Nov 2024 16:32:50 +0100 Subject: [PATCH 029/121] Revert "fix json" This reverts commit 722d6b9fb5573788c1926ece452009d56e438f08. 
--- .../src/models_library/service_settings_labels.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index 511a3e7704b..0a632a7b70d 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -3,7 +3,7 @@ from enum import Enum from functools import cached_property from pathlib import Path -from typing import Annotated, Any, Literal, TypeAlias +from typing import Any, Literal, TypeAlias from common_library.json_serialization import json_dumps from pydantic import ( @@ -267,7 +267,7 @@ class RestartPolicy(str, Enum): class DynamicSidecarServiceLabels(BaseModel): """All "simcore.service.*" labels including keys""" - paths_mapping: Annotated[PathMappingsLabel | None, Json]= Field( + paths_mapping: Json[PathMappingsLabel] | None = Field( None, alias="simcore.service.paths-mapping", description=( @@ -276,7 +276,7 @@ class DynamicSidecarServiceLabels(BaseModel): ), ) - compose_spec: Annotated[ComposeSpecLabelDict | None, Json] = Field( + compose_spec: Json[ComposeSpecLabelDict] | None = Field( None, alias="simcore.service.compose-spec", description=( @@ -317,19 +317,21 @@ class DynamicSidecarServiceLabels(BaseModel): ), ) - containers_allowed_outgoing_permit_list: Annotated[None | dict[str, list[NATRule]], Json] = Field( + containers_allowed_outgoing_permit_list: None | ( + Json[dict[str, list[NATRule]]] + ) = Field( None, alias="simcore.service.containers-allowed-outgoing-permit-list", description="allow internet access to certain domain names and ports per container", ) - containers_allowed_outgoing_internet: Annotated[set[str] | None, Json] = Field( + containers_allowed_outgoing_internet: Json[set[str]] | None = Field( None, alias="simcore.service.containers-allowed-outgoing-internet", 
description="allow complete internet access to containers in here", ) - callbacks_mapping: Annotated[CallbacksMapping | None, Json] = Field( + callbacks_mapping: Json[CallbacksMapping] | None = Field( default_factory=CallbacksMapping, # type: ignore[arg-type] # this one ANE I am not sure about alias="simcore.service.callbacks-mapping", description="exposes callbacks from user services to the sidecar", From 25d86578d9a0ecb054b28fdc623ee78cbcd03283 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 09:35:32 +0100 Subject: [PATCH 030/121] fix default --- .../src/models_library/service_settings_labels.py | 2 +- .../src/simcore_service_director_v2/core/settings.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index 0a632a7b70d..69edad3cb95 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -276,7 +276,7 @@ class DynamicSidecarServiceLabels(BaseModel): ), ) - compose_spec: Json[ComposeSpecLabelDict] | None = Field( + compose_spec: Json[ComposeSpecLabelDict | None] = Field( None, alias="simcore.service.compose-spec", description=( diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index 81f9984eb58..45a1c3b2f33 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -172,7 +172,7 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings): DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True DIRECTOR_V2_PROFILING: bool = False - DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None + DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None = Field(default=None) # extras 
SWARM_STACK_NAME: str = Field( @@ -244,6 +244,7 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings): description="settings for the private registry deployed with the platform", ) DIRECTOR_V2_DOCKER_HUB_REGISTRY: RegistrySettings | None = Field( + default=None, description="public DockerHub registry settings" ) From 0a39fd0c5591c7d1f4fbf213cc52591385ceabf7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 09:57:38 +0100 Subject: [PATCH 031/121] continue fixing --- .../settings-library/src/settings_library/basic_types.py | 8 ++++---- .../src/simcore_service_director_v2/core/application.py | 5 +++-- .../core/dynamic_services_settings/scheduler.py | 4 ++-- services/director-v2/tests/unit/test_core_settings.py | 4 ++-- 4 files changed, 11 insertions(+), 10 deletions(-) diff --git a/packages/settings-library/src/settings_library/basic_types.py b/packages/settings-library/src/settings_library/basic_types.py index f4c745b2284..9d1d440d5ae 100644 --- a/packages/settings-library/src/settings_library/basic_types.py +++ b/packages/settings-library/src/settings_library/basic_types.py @@ -3,7 +3,7 @@ # This is a minor evil to avoid the maintenance burden that creates # an extra dependency to a larger models_library (intra-repo library) -from enum import Enum +from enum import StrEnum from typing import Annotated, TypeAlias from pydantic import Field, StringConstraints @@ -16,14 +16,14 @@ VersionTag: TypeAlias = Annotated[str, StringConstraints(pattern=r"^v\d$")] -class LogLevel(str, Enum): +class LogLevel(StrEnum): DEBUG = "DEBUG" INFO = "INFO" WARNING = "WARNING" ERROR = "ERROR" -class BootMode(str, Enum): +class BootMode(StrEnum): """ Values taken by SC_BOOT_MODE environment variable set in Dockerfile and used during docker/boot.sh @@ -36,7 +36,7 @@ class BootMode(str, Enum): DEVELOPMENT = "development" -class BuildTargetEnum(str, Enum): +class BuildTargetEnum(StrEnum): """ Values taken by SC_BUILD_TARGET environment variable set in 
Dockerfile that defines the stage targeted in the diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 7ae38ad4159..00cc92ac748 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -9,6 +9,7 @@ from servicelib.fastapi.profiler_middleware import ProfilerMiddleware from servicelib.fastapi.tracing import setup_tracing from servicelib.logging_utils import config_all_loggers +from settings_library.basic_types import BootMode from .._meta import API_VERSION, API_VTAG, APP_NAME, PROJECT_NAME, SUMMARY from ..api.entrypoints import api_router @@ -127,12 +128,12 @@ def create_base_app(settings: AppSettings | None = None) -> FastAPI: logging.getLogger(name).setLevel(quiet_level) app = FastAPI( - debug=settings.SC_BOOT_MODE.is_devel_mode(), + debug=settings.SC_BOOT_MODE == BootMode.DEVELOPMENT, title=PROJECT_NAME, description=SUMMARY, version=API_VERSION, openapi_url=f"/api/{API_VTAG}/openapi.json", - **get_common_oas_options(settings.SC_BOOT_MODE.is_devel_mode()), + **get_common_oas_options(settings.SC_BOOT_MODE == BootMode.DEVELOPMENT), ) override_fastapi_openapi_method(app) app.state.settings = settings diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py index 74810cdd101..5072a365af6 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/scheduler.py @@ -103,8 +103,8 @@ class DynamicServicesSchedulerSettings(BaseCustomSettings): ), ) - DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: PositiveFloat = Field( - 60.0 * _MINUTE, + 
DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: timedelta = Field( + timedelta(hours=1), description=( "When saving and restoring the state of a dynamic service, depending on the payload " "some services take longer or shorter to save and restore. Across the " diff --git a/services/director-v2/tests/unit/test_core_settings.py b/services/director-v2/tests/unit/test_core_settings.py index 84d99057f3a..155dd01af54 100644 --- a/services/director-v2/tests/unit/test_core_settings.py +++ b/services/director-v2/tests/unit/test_core_settings.py @@ -43,7 +43,7 @@ def test_enforce_r_clone_requirement(monkeypatch: pytest.MonkeyPatch) -> None: def test_settings_with_project_env_devel(project_env_devel_environment: dict[str, Any]): # loads from environ settings = AppSettings.create_from_envs() - print("captured settings: \n", settings.json(indent=2)) + print("captured settings: \n", settings.model_dump_json(indent=2)) assert settings.SC_BOOT_MODE == BootModeEnum.DEBUG assert settings.LOG_LEVEL == LogLevel.DEBUG @@ -60,7 +60,7 @@ def test_settings_with_repository_env_devel( ) # defined in docker-compose settings = AppSettings.create_from_envs() - print("captured settings: \n", settings.json(indent=2)) + print("captured settings: \n", settings.model_dump_json(indent=2)) assert settings From 86318f537e20502ceba5e556d7237245f5e60b2a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 10:11:43 +0100 Subject: [PATCH 032/121] fix url --- .../modules/dynamic_sidecar/api_client/_thin.py | 6 +++--- .../unit/test_modules_dynamic_sidecar_client_api_thin.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py index 241f32fe70e..1a6a7e24256 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py +++ 
b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py @@ -63,13 +63,13 @@ def _get_url( no_api_version: bool = False, ) -> str: """formats and returns an url for the request""" - api_version = "" if no_api_version else f"/{self.API_VERSION}" + api_version = "" if no_api_version else f"{self.API_VERSION}/" return f"{dynamic_sidecar_endpoint}{api_version}{postfix}" async def _get_health_common( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/health", no_api_version=True) + url = self._get_url(dynamic_sidecar_endpoint, "health", no_api_version=True) return await self.client.get(url, timeout=self._health_request_timeout) @retry_on_errors() @@ -88,7 +88,7 @@ async def get_health_no_retry( async def get_containers( self, dynamic_sidecar_endpoint: AnyHttpUrl, *, only_status: bool ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers") + url = self._get_url(dynamic_sidecar_endpoint, "containers") return await self.client.get(url, params={"only_status": only_status}) @retry_on_errors() diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py index cf4e0bc6e00..8482dc4867e 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -116,7 +116,7 @@ async def test_get_containers( mock_response = Response(status.HTTP_200_OK) mock_request( "GET", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers?only_status={str(only_status).lower()}", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers?only_status={str(only_status).lower()}", mock_response, None, ) From 16538e355b2cc3216f12350498aba47f35580da1 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 10:14:11 +0100 Subject: 
[PATCH 033/121] fix url --- .../dynamic_sidecar/api_client/_thin.py | 40 +++++++++---------- ...modules_dynamic_sidecar_client_api_thin.py | 2 +- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py index 1a6a7e24256..81319df3bbf 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py @@ -100,7 +100,7 @@ async def patch_containers_ports_io( enable_outputs: bool, enable_inputs: bool, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/io") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/io") return await self.client.patch( url, json={"enable_outputs": enable_outputs, "enable_inputs": enable_inputs} ) @@ -110,7 +110,7 @@ async def patch_containers_ports_io( async def post_containers_ports_outputs_dirs( self, dynamic_sidecar_endpoint: AnyHttpUrl, *, outputs_labels: dict[str, Any] ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/outputs/dirs") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs/dirs") return await self.client.post(url, json={"outputs_labels": outputs_labels}) @retry_on_errors() @@ -125,7 +125,7 @@ async def get_containers_name( } ) url = self._get_url( - dynamic_sidecar_endpoint, f"/containers/name?filters={filters}" + dynamic_sidecar_endpoint, f"containers/name?filters={filters}" ) return await self.client.get(url=url) @@ -140,7 +140,7 @@ async def post_containers_networks_attach( network_aliases: list[str], ) -> Response: url = self._get_url( - dynamic_sidecar_endpoint, f"/containers/{container_id}/networks:attach" + dynamic_sidecar_endpoint, f"containers/{container_id}/networks:attach" ) 
return await self.client.post( url, @@ -158,7 +158,7 @@ async def post_containers_networks_detach( network_id: str, ) -> Response: url = self._get_url( - dynamic_sidecar_endpoint, f"/containers/{container_id}/networks:detach" + dynamic_sidecar_endpoint, f"containers/{container_id}/networks:detach" ) return await self.client.post( url, @@ -174,7 +174,7 @@ async def post_containers_compose_spec( *, compose_spec: str, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/compose-spec") + url = self._get_url(dynamic_sidecar_endpoint, "containers/compose-spec") return await self.client.post(url, json={"docker_compose_yaml": compose_spec}) @retry_on_errors() @@ -185,9 +185,9 @@ async def post_containers_tasks( *, metrics_params: CreateServiceMetricsAdditionalParams, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers") + url = self._get_url(dynamic_sidecar_endpoint, "containers") return await self.client.post( - url, json={"metrics_params": metrics_params.dict()} + url, json={"metrics_params": metrics_params.model_dump()} ) @retry_on_errors() @@ -195,7 +195,7 @@ async def post_containers_tasks( async def post_containers_tasks_down( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers:down") + url = self._get_url(dynamic_sidecar_endpoint, "containers:down") return await self.client.post(url) @retry_on_errors() @@ -203,7 +203,7 @@ async def post_containers_tasks_down( async def post_containers_tasks_state_restore( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/state:restore") + url = self._get_url(dynamic_sidecar_endpoint, "containers/state:restore") return await self.client.post(url) @retry_on_errors() @@ -211,7 +211,7 @@ async def post_containers_tasks_state_restore( async def post_containers_tasks_state_save( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = 
self._get_url(dynamic_sidecar_endpoint, "/containers/state:save") + url = self._get_url(dynamic_sidecar_endpoint, "containers/state:save") return await self.client.post(url) @retry_on_errors() @@ -219,7 +219,7 @@ async def post_containers_tasks_state_save( async def post_containers_images_pull( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/images:pull") + url = self._get_url(dynamic_sidecar_endpoint, "containers/images:pull") return await self.client.post(url) @retry_on_errors() @@ -230,7 +230,7 @@ async def post_containers_tasks_ports_inputs_pull( port_keys: list[str] | None = None, ) -> Response: port_keys = [] if port_keys is None else port_keys - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/inputs:pull") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/inputs:pull") return await self.client.post(url, json=port_keys) @retry_on_errors() @@ -241,7 +241,7 @@ async def post_containers_tasks_ports_outputs_pull( port_keys: list[str] | None = None, ) -> Response: port_keys = [] if port_keys is None else port_keys - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/outputs:pull") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs:pull") return await self.client.post(url, json=port_keys) @retry_on_errors() @@ -249,7 +249,7 @@ async def post_containers_tasks_ports_outputs_pull( async def post_containers_tasks_ports_outputs_push( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/outputs:push") + url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs:push") return await self.client.post(url) @retry_on_errors() @@ -257,7 +257,7 @@ async def post_containers_tasks_ports_outputs_push( async def post_containers_tasks_restart( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, 
"/containers:restart") + url = self._get_url(dynamic_sidecar_endpoint, "containers:restart") return await self.client.post(url) @retry_on_errors() @@ -268,7 +268,7 @@ async def put_volumes( volume_category: VolumeCategory, volume_status: VolumeStatus, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, f"/volumes/{volume_category}") + url = self._get_url(dynamic_sidecar_endpoint, f"volumes/{volume_category}") return await self.client.put(url, json={"status": volume_status}) @@ -277,7 +277,7 @@ async def put_volumes( async def proxy_config_load( self, proxy_endpoint: AnyHttpUrl, proxy_configuration: dict[str, Any] ) -> Response: - url = self._get_url(proxy_endpoint, "/load", no_api_version=True) + url = self._get_url(proxy_endpoint, "load", no_api_version=True) return await self.client.post(url, json=proxy_configuration) @retry_on_errors() @@ -286,7 +286,7 @@ async def get_containers_activity( self, dynamic_sidecar_endpoint: AnyHttpUrl, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/containers/activity") + url = self._get_url(dynamic_sidecar_endpoint, "containers/activity") return await self.client.get(url) @retry_on_errors() @@ -295,5 +295,5 @@ async def post_disk_reserved_free( self, dynamic_sidecar_endpoint: AnyHttpUrl, ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/disk/reserved:free") + url = self._get_url(dynamic_sidecar_endpoint, "disk/reserved:free") return await self.client.post(url) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py index 8482dc4867e..181093ffd6e 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -139,7 +139,7 @@ async def test_post_patch_containers_ports_io( mock_response = Response(status.HTTP_204_NO_CONTENT) mock_request( "PATCH", - 
f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/ports/io", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/ports/io", mock_response, None, ) From 037d651b3da7d2cde159e52a2f325f777f7b5a31 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 10:17:30 +0100 Subject: [PATCH 034/121] fix url --- ...test_modules_dynamic_sidecar_client_api_thin.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py index 181093ffd6e..6a3f7a21255 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -162,7 +162,7 @@ async def test_post_containers_ports_outputs_dirs( mock_response = Response(status.HTTP_204_NO_CONTENT) mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/ports/outputs/dirs", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/ports/outputs/dirs", mock_response, None, ) @@ -216,7 +216,7 @@ async def test_post_containers_networks_attach( container_id = "a_container_id" mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/{container_id}/networks:attach", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/{container_id}/networks:attach", mock_response, None, ) @@ -239,7 +239,7 @@ async def test_post_containers_networks_detach( container_id = "a_container_id" mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/{container_id}/networks:detach", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/{container_id}/networks:detach", mock_response, None, ) @@ -262,7 +262,7 @@ async def test_put_volumes( mock_response = Response(status.HTTP_204_NO_CONTENT) 
mock_request( "PUT", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/volumes/{volume_category}", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/volumes/{volume_category}", mock_response, None, ) @@ -371,7 +371,7 @@ async def test_get_containers_inactivity( mock_response = Response(status.HTTP_200_OK, json={}) mock_request( "GET", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/activity", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/activity", mock_response, None, ) @@ -388,7 +388,7 @@ async def test_post_disk_reserved_free( mock_response = Response(status.HTTP_204_NO_CONTENT) mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/disk/reserved:free", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/disk/reserved:free", mock_response, None, ) @@ -405,7 +405,7 @@ async def test_post_containers_compose_spec( mock_response = Response(status.HTTP_202_ACCEPTED) mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/compose-spec", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}/containers/compose-spec", mock_response, None, ) From 4c73ea0b42ffd32c107b377120a96d435e12056c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 10:39:56 +0100 Subject: [PATCH 035/121] fix urls --- .../unit/test_modules_dynamic_sidecar_client_api_thin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py index 6a3f7a21255..6584020dcb6 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -191,7 +191,7 @@ async def test_get_containers_name( mock_request( "GET", ( - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}" + 
f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}" f"/containers/name?filters={encoded_filters}" ), mock_response, @@ -353,7 +353,7 @@ async def test_post_containers_tasks( mock_response = Response(status.HTTP_202_ACCEPTED, json="mocked_task_id") mock_request( "POST", - f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}{mock_endpoint}", + f"{dynamic_sidecar_endpoint}{thin_client.API_VERSION}{mock_endpoint}", mock_response, None, ) From eed9e89eb6707847015599a834e1317e11be4fd2 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 10:42:57 +0100 Subject: [PATCH 036/121] fix dateformat --- .../simcore_service_director_v2/models/comp_runs.py | 12 ++++++------ .../simcore_service_director_v2/models/comp_tasks.py | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index c14d6d6a547..7fa95a2a176 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -94,8 +94,8 @@ def convert_null_to_empty_metadata(cls, v): "cluster_id": 0, "iteration": 42, "result": "NOT_STARTED", - "created": "2021-03-01 13:07:34.19161", - "modified": "2021-03-01 13:07:34.19161", + "created": "2021-03-01T13:07:34.19161", + "modified": "2021-03-01T13:07:34.19161", "use_on_demand_clusters": False, }, { @@ -105,10 +105,10 @@ def convert_null_to_empty_metadata(cls, v): "cluster_id": 123, "iteration": 12, "result": "SUCCESS", - "created": "2021-03-01 13:07:34.19161", - "modified": "2021-03-01 13:07:34.19161", - "started": "2021-03-01 8:07:34.19161", - "ended": "2021-03-01 13:07:34.10", + "created": "2021-03-01T13:07:34.19161", + "modified": "2021-03-01T13:07:34.19161", + "started": "2021-03-01T08:07:34.19161", + "ended": "2021-03-01T13:07:34.10", "metadata": { "node_id_names_map": {}, "product_name": "osparc", 
diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 6af64d2d84f..41a3b0701b8 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -225,7 +225,7 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: } }, "image": image_example, - "submit": "2021-03-01 13:07:34.19161", + "submit": "2021-03-01T13:07:34.19161", "node_class": "INTERACTIVE", "state": "NOT_STARTED", "progress": 0.44, From bf09d1c793c713d6f9a1b2fcb906a62944692e03 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 10:46:16 +0100 Subject: [PATCH 037/121] fix --- .../src/simcore_service_director_v2/models/comp_runs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index 7fa95a2a176..2a1bc1e7c9c 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -94,7 +94,7 @@ def convert_null_to_empty_metadata(cls, v): "cluster_id": 0, "iteration": 42, "result": "NOT_STARTED", - "created": "2021-03-01T13:07:34.19161", + "created": "2021-03-01T12:07:34.19161", "modified": "2021-03-01T13:07:34.19161", "use_on_demand_clusters": False, }, @@ -106,7 +106,7 @@ def convert_null_to_empty_metadata(cls, v): "iteration": 12, "result": "SUCCESS", "created": "2021-03-01T13:07:34.19161", - "modified": "2021-03-01T13:07:34.19161", + "modified": "2021-03-01T14:07:34.19161", "started": "2021-03-01T08:07:34.19161", "ended": "2021-03-01T13:07:34.10", "metadata": { From 4c8d6f585d2baa1d117298fa2452bdd4e998f58e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 11:09:18 +0100 Subject: 
[PATCH 038/121] fix match --- .../tests/unit/test_modules_dynamic_sidecar_scheduler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py index 13763953d07..84711cf0936 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py @@ -529,9 +529,9 @@ async def test_mark_all_services_in_wallet_for_removal( wallet_id = scheduler_data.wallet_info.wallet_id can_remove = scheduler_data.dynamic_sidecar.service_removal_state.can_remove match wallet_id: - case WalletID(1): + case 1: assert can_remove is True - case WalletID(2): + case 2: assert can_remove is False case _: pytest.fail("unexpected case") From 32be27ed3965ea2a9443ba09ddd05a58a442e18e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 11:31:58 +0100 Subject: [PATCH 039/121] fix --- .../models_library/api_schemas_directorv2/clusters.py | 11 +++++------ .../director-v2/tests/unit/test_models_clusters.py | 6 +++--- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py index 301ea4181ec..3f86e18d30b 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py @@ -1,4 +1,4 @@ -from typing import TypeAlias +from typing import Any, TypeAlias from pydantic import ( AnyHttpUrl, @@ -48,13 +48,12 @@ class WorkerMetrics(BaseModel): class UsedResources(DictModel[str, NonNegativeFloat]): @model_validator(mode="before") @classmethod - def ensure_negative_value_is_zero(cls, values): + def ensure_negative_value_is_zero(cls, values: dict[str, Any]): # dasks 
adds/remove resource values and sometimes # they end up being negative instead of 0 - if v := values.get("__root__", {}): - for res_key, res_value in v.items(): - if res_value < 0: - v[res_key] = 0 + for res_key, res_value in values.items(): + if res_value < 0: + values[res_key] = 0 return values diff --git a/services/director-v2/tests/unit/test_models_clusters.py b/services/director-v2/tests/unit/test_models_clusters.py index ac65d24609a..b08a988fc68 100644 --- a/services/director-v2/tests/unit/test_models_clusters.py +++ b/services/director-v2/tests/unit/test_models_clusters.py @@ -13,7 +13,7 @@ WorkerMetrics, ) from models_library.clusters import ClusterTypeInModel -from pydantic import BaseModel, TypeAdapter +from pydantic import BaseModel, ByteSize, TypeAdapter from simcore_postgres_database.models.clusters import ClusterType @@ -61,11 +61,11 @@ def test_scheduler_constructor_with_no_workers_has_correct_dict(faker: Faker): def test_worker_constructor_corrects_negative_used_resources(faker: Faker): worker = Worker( - id=faker.pyint(min_value=1), + id=f"{faker.pyint(min_value=1)}", name=faker.name(), resources=TypeAdapter(AvailableResources).validate_python({}), used_resources=TypeAdapter(UsedResources).validate_python({"CPU": -0.0000234}), - memory_limit=faker.pyint(min_value=1), + memory_limit=ByteSize(faker.pyint(min_value=1)), metrics=WorkerMetrics.model_validate( { "cpu": faker.pyfloat(min_value=0), From d2a18b7bb10d31386c5e53f8fdd24d0dd8931573 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 11:39:42 +0100 Subject: [PATCH 040/121] fix serialization --- .../docker_service_specs/sidecar.py | 33 ++++++++++++------- 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index e8206ced2b8..13f13ad28c5 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -3,6 +3,7 @@ from typing import Any, NamedTuple from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets from models_library.aiodocker_api import AioDockerServiceSpec from models_library.basic_types import BootModeEnum, PortInt from models_library.callbacks_mapping import CallbacksMapping @@ -19,9 +20,6 @@ from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.efs_guardian import efs_manager from servicelib.utils import unused_port -from settings_library.aws_s3_cli import AwsS3CliSettings -from settings_library.docker_registry import RegistrySettings -from settings_library.utils_encoders import create_json_encoder_wo_secrets from ....constants import DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL from ....core.dynamic_services_settings.scheduler import ( @@ -101,8 +99,11 @@ def _get_environment_variables( app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS and app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS.AWS_S3_CLI_S3 ): - dy_sidecar_aws_s3_cli_settings = app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS.json( - encoder=create_json_encoder_wo_secrets(AwsS3CliSettings), + dy_sidecar_aws_s3_cli_settings = json_dumps( + model_dump_with_secrets( + app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_AWS_S3_CLI_SETTINGS, + show_secrets=True, + ) ) state_exclude = set() @@ -133,7 +134,7 @@ def _get_environment_variables( "DY_SIDECAR_USER_SERVICES_HAVE_INTERNET_ACCESS": f"{allow_internet_access}", "DY_SIDECAR_SYSTEM_MONITOR_TELEMETRY_ENABLE": f"{telemetry_enabled}", "DY_SIDECAR_STATE_EXCLUDE": json_dumps(f"{x}" for x in state_exclude), - 
"DY_SIDECAR_CALLBACKS_MAPPING": callbacks_mapping.json(), + "DY_SIDECAR_CALLBACKS_MAPPING": callbacks_mapping.model_dump_json(), "DY_SIDECAR_STATE_PATHS": json_dumps( f"{x}" for x in scheduler_data.paths_mapping.state_paths ), @@ -157,14 +158,22 @@ def _get_environment_variables( "RABBIT_PORT": f"{rabbit_settings.RABBIT_PORT}", "RABBIT_USER": f"{rabbit_settings.RABBIT_USER}", "RABBIT_SECURE": f"{rabbit_settings.RABBIT_SECURE}", - "DY_DEPLOYMENT_REGISTRY_SETTINGS": app_settings.DIRECTOR_V2_DOCKER_REGISTRY.json( - encoder=create_json_encoder_wo_secrets(RegistrySettings), - exclude={"resolved_registry_url", "api_url"}, + "DY_DEPLOYMENT_REGISTRY_SETTINGS": ( + json_dumps( + model_dump_with_secrets( + app_settings.DIRECTOR_V2_DOCKER_REGISTRY, + show_secrets=True, + exclude={"resolved_registry_url", "api_url"}, + ) + ) ), "DY_DOCKER_HUB_REGISTRY_SETTINGS": ( - app_settings.DIRECTOR_V2_DOCKER_HUB_REGISTRY.json( - encoder=create_json_encoder_wo_secrets(RegistrySettings), - exclude={"resolved_registry_url", "api_url"}, + json_dumps( + model_dump_with_secrets( + app_settings.DIRECTOR_V2_DOCKER_HUB_REGISTRY, + show_secrets=True, + exclude={"resolved_registry_url", "api_url"}, + ) ) if app_settings.DIRECTOR_V2_DOCKER_HUB_REGISTRY else "null" From 9dac7442e794eebe9101d6b23994f6c82b2a84c9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 11:52:49 +0100 Subject: [PATCH 041/121] fix encoder --- .../src/simcore_service_director_v2/utils/db.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/db.py b/services/director-v2/src/simcore_service_director_v2/utils/db.py index b4240a1289b..416157e70f9 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/db.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/db.py @@ -1,7 +1,7 @@ -import json import logging from typing import Any +from common_library.serialization import model_dump_with_secrets 
from fastapi import FastAPI from models_library.clusters import BaseCluster from models_library.projects_state import RunningState @@ -30,14 +30,13 @@ def to_clusters_db(cluster: BaseCluster, *, only_update: bool) -> dict[str, Any]: - db_model: dict[str, Any] = json.loads( - cluster.json( - by_alias=True, - exclude={"id", "access_rights"}, - exclude_unset=only_update, - exclude_none=only_update, - encoder=create_json_encoder_wo_secrets(BaseCluster), - ) + db_model: dict[str, Any] = model_dump_with_secrets( + cluster, + show_secrets=True, + by_alias=True, + exclude={"id", "access_rights"}, + exclude_unset=only_update, + exclude_none=only_update, ) return db_model From 1ce47664e7333b284a0537de6678cc00cc73d52e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 12:00:01 +0100 Subject: [PATCH 042/121] fix import --- services/director-v2/src/simcore_service_director_v2/utils/db.py | 1 - 1 file changed, 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/db.py b/services/director-v2/src/simcore_service_director_v2/utils/db.py index 416157e70f9..af944c11dff 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/db.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/db.py @@ -5,7 +5,6 @@ from fastapi import FastAPI from models_library.clusters import BaseCluster from models_library.projects_state import RunningState -from settings_library.utils_encoders import create_json_encoder_wo_secrets from simcore_postgres_database.models.comp_pipeline import StateType from ..api.dependencies.database import RepoType, get_base_repository From 0b7e719aba1182407691d2eb5442f9e1b5bd1221 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 12:35:44 +0100 Subject: [PATCH 043/121] fix --- .../common-library/src/common_library/serialization.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/common-library/src/common_library/serialization.py 
b/packages/common-library/src/common_library/serialization.py index 964dfc01ef8..81245bdedd7 100644 --- a/packages/common-library/src/common_library/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -1,11 +1,9 @@ from datetime import timedelta -from typing import Any +from typing import Any, get_origin from pydantic import BaseModel, SecretStr from pydantic_core import Url -from .pydantic_fields_extension import get_type - def model_dump_with_secrets( settings_obj: BaseModel, *, show_secrets: bool, **pydantic_export_options @@ -31,7 +29,9 @@ def model_dump_with_secrets( data[field_name] = str(field_data) elif isinstance(field_data, dict): - field_type = get_type(settings_obj.model_fields[field_name]) + field_type = get_origin(settings_obj.model_fields[field_name].annotation) + if not field_type: + break if issubclass(field_type, BaseModel): data[field_name] = model_dump_with_secrets( field_type.model_validate(field_data), From a34764d17000c4d49eb4dd9150726e78f786da5b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 12:39:31 +0100 Subject: [PATCH 044/121] fix --- packages/common-library/src/common_library/serialization.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/common-library/src/common_library/serialization.py b/packages/common-library/src/common_library/serialization.py index 81245bdedd7..ce128ff6df0 100644 --- a/packages/common-library/src/common_library/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -31,7 +31,7 @@ def model_dump_with_secrets( elif isinstance(field_data, dict): field_type = get_origin(settings_obj.model_fields[field_name].annotation) if not field_type: - break + continue if issubclass(field_type, BaseModel): data[field_name] = model_dump_with_secrets( field_type.model_validate(field_data), From 0302b4be89d06c2c191f13cd670ebf8fc3041e91 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 12:46:20 +0100 
Subject: [PATCH 045/121] reduce complexity --- .../common-library/src/common_library/serialization.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/packages/common-library/src/common_library/serialization.py b/packages/common-library/src/common_library/serialization.py index ce128ff6df0..b201f4115a8 100644 --- a/packages/common-library/src/common_library/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -20,19 +20,15 @@ def model_dump_with_secrets( data[field_name] = field_data.total_seconds() elif isinstance(field_data, SecretStr): - if show_secrets: - data[field_name] = field_data.get_secret_value() - else: - data[field_name] = str(field_data) + data[field_name] = field_data.get_secret_value() if show_secrets else str(field_data) + elif isinstance(field_data, Url): data[field_name] = str(field_data) elif isinstance(field_data, dict): field_type = get_origin(settings_obj.model_fields[field_name].annotation) - if not field_type: - continue - if issubclass(field_type, BaseModel): + if field_type and issubclass(field_type, BaseModel): data[field_name] = model_dump_with_secrets( field_type.model_validate(field_data), show_secrets=show_secrets, From 92a40dea04f29426a0f224ab41d80e3a8cd59a6b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 13:23:21 +0100 Subject: [PATCH 046/121] continue fixing --- packages/models-library/src/models_library/docker.py | 8 +------- services/director-v2/tests/unit/test_core_settings.py | 3 ++- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/packages/models-library/src/models_library/docker.py b/packages/models-library/src/models_library/docker.py index b8134b3ec73..6e87f06b62e 100644 --- a/packages/models-library/src/models_library/docker.py +++ b/packages/models-library/src/models_library/docker.py @@ -37,13 +37,7 @@ def from_key(cls, key: str) -> "DockerLabelKey": str, StringConstraints(pattern=DOCKER_GENERIC_TAG_KEY_RE) ] - -class 
DockerPlacementConstraint(ConstrainedStr): - strip_whitespace = True - regex = re.compile( - r"^(?!-)(?![.])(?!.*--)(?!.*[.][.])[a-zA-Z0-9.-]*(? None: monkeypatch.setenv("DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS", custom_constraints) - with pytest.raises(Exception): + with pytest.raises(DefaultFromEnvFactoryError): AppSettings.create_from_envs() From 250a9c0492bbec0bdd2d7e34bff571ab87ead1fb Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 13:51:52 +0100 Subject: [PATCH 047/121] continue fixing --- .../unit/test_modules_dask_clients_pool.py | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py index f9e8b1f13b7..bf3f0787367 100644 --- a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py +++ b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py @@ -7,6 +7,8 @@ from typing import Any, AsyncIterator, Callable, get_args from unittest import mock +from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets import pytest from _dask_helpers import DaskGatewayServer from distributed.deploy.spec import SpecCluster @@ -126,10 +128,14 @@ def creator(): ) monkeypatch.setenv( "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH", - SimpleAuthentication( - username=faker.user_name(), - password=SecretStr(local_dask_gateway_server.password), - ).json(encoder=create_json_encoder_wo_secrets(SimpleAuthentication)), + json_dumps( + model_dump_with_secrets( + SimpleAuthentication( + username=faker.user_name(), + password=SecretStr(local_dask_gateway_server.password), + ), show_secrets=True + ) + ) ) return creator @@ -194,11 +200,11 @@ async def test_dask_clients_pool_acquisition_creates_client_on_demand( cluster_type=ClusterTypeInModel.ON_PREMISE, ) ) - async with clients_pool.acquire(cluster) as dask_client: + async 
with clients_pool.acquire(cluster): # on start it is created mocked_dask_client.create.assert_has_calls(mocked_creation_calls) - async with clients_pool.acquire(cluster) as dask_client: + async with clients_pool.acquire(cluster): # the connection already exists, so there is no new call to create mocked_dask_client.create.assert_has_calls(mocked_creation_calls) @@ -278,5 +284,5 @@ def just_a_quick_fct(x, y): ) future = dask_client.backend.client.submit(just_a_quick_fct, 12, 23) assert future - result = await future.result(timeout=10) # type: ignore + result = await future.result(timeout=10) assert result == 35 From 3f6bf8794925a101a6eb3eaa16dbdd6a112c14ef Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 14:06:55 +0100 Subject: [PATCH 048/121] fix timedelta --- .../fastapi/long_running_tasks/_context_manager.py | 2 +- .../modules/dynamic_sidecar/api_client/_public.py | 14 ++++++++------ .../modules/dynamic_sidecar/api_client/_thin.py | 2 +- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py index 2c001525173..35b734ac055 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py @@ -118,7 +118,7 @@ async def _wait_task_completion() -> None: logger.debug("%s, %s", f"{task_id=}", f"{result=}") yield result - except asyncio.TimeoutError as e: + except TimeoutError as e: await client.cancel_and_delete_task(task_id) raise TaskClientTimeoutError( task_id=task_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py index 3ee32e3a9b0..5945e07b8e3 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py @@ -358,7 +358,7 @@ async def restore_service_state(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> i result: Any | None = await self._await_for_result( task_id, dynamic_sidecar_endpoint, - self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), _debug_progress_callback, ) assert isinstance(result, int) # nosec @@ -392,7 +392,7 @@ async def save_service_state( result: Any | None = await self._await_for_result( task_id, dynamic_sidecar_endpoint, - self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), progress_callback, ) assert isinstance(result, int) # nosec @@ -411,7 +411,7 @@ async def pull_service_input_ports( transferred_bytes = await self._await_for_result( task_id, dynamic_sidecar_endpoint, - self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), _debug_progress_callback, ) return transferred_bytes or 0 @@ -429,7 +429,7 @@ async def pull_service_output_ports( result: Any | None = await self._await_for_result( task_id, dynamic_sidecar_endpoint, - self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), _debug_progress_callback, ) assert isinstance(result, int) # nosec @@ -448,7 +448,7 @@ async def push_service_output_ports( await self._await_for_result( task_id, dynamic_sidecar_endpoint, - 
self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + self._dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), progress_callback, ) @@ -495,7 +495,9 @@ async def get_service_activity( dynamic_sidecar_endpoint ) decoded_response = response.json() - return ActivityInfo.model_validate(decoded_response) if decoded_response else None + return ( + ActivityInfo.model_validate(decoded_response) if decoded_response else None + ) async def free_reserved_disk_space( self, dynamic_sidecar_endpoint: AnyHttpUrl diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py index 81319df3bbf..21ef1bbe279 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py @@ -35,7 +35,7 @@ def __init__(self, app: FastAPI): # timeouts self._health_request_timeout = Timeout(1.0, connect=1.0) self._save_restore_timeout = Timeout( - scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), connect=scheduler_settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, ) self._restart_containers_timeout = Timeout( From 531d3dd634abcd15029886c12ec146b54e9f4ef6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 14:14:11 +0100 Subject: [PATCH 049/121] fix mypy --- .../api/routes/dynamic_services.py | 2 +- .../simcore_service_director_v2/models/comp_tasks.py | 10 ++++------ 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py index 
e953c0f7d75..24db21cbd23 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py @@ -273,7 +273,7 @@ async def service_retrieve_data_on_ports( dynamic_services_settings.DYNAMIC_SCHEDULER ) timeout = httpx.Timeout( - dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT.total_seconds(), connect=dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, ) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 41a3b0701b8..57e99695670 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -74,13 +74,13 @@ def _migrate_from_requirements(cls, v, info: ValidationInfo): model_config = ConfigDict( from_attributes=True, json_schema_extra={ - "examples": [ + "examples": [ # type: ignore { "name": "simcore/services/dynamic/jupyter-octave-python-math", "tag": "1.3.1", "node_requirements": node_req_example, } - for node_req_example in NodeRequirements.model_config[ + for node_req_example in NodeRequirements.model_config[ # type: ignore "json_schema_extra" ]["examples"] ] @@ -237,11 +237,9 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: "pricing_unit_id": 1, "pricing_unit_cost_id": 1, }, - "hardware_info": HardwareInfo.model_config["json_schema_extra"][ - "examples" - ][0], + "hardware_info": next(iter(HardwareInfo.model_config["json_schema_extra"]["examples"])), # type: ignore } - for image_example in Image.model_config["json_schema_extra"]["examples"] + for image_example in Image.model_config["json_schema_extra"]["examples"] # type: ignore ] }, ) From 
bd60f9432928e99bba753b3c53cfb1d13cc43e0b Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 14:22:21 +0100 Subject: [PATCH 050/121] fix mypy --- .../src/simcore_service_director_v2/models/pricing.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/pricing.py b/services/director-v2/src/simcore_service_director_v2/models/pricing.py index 52a61d8c9e3..35ff94c744d 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/pricing.py +++ b/services/director-v2/src/simcore_service_director_v2/models/pricing.py @@ -13,6 +13,7 @@ class PricingInfo(BaseModel): pricing_unit_id: PricingUnitId pricing_unit_cost_id: PricingUnitCostId pricing_unit_cost: Decimal + model_config = ConfigDict( json_schema_extra={ "examples": [ @@ -20,7 +21,7 @@ class PricingInfo(BaseModel): "pricing_plan_id": 1, "pricing_unit_id": 1, "pricing_unit_cost_id": 1, - "pricing_unit_cost": Decimal(10), + "pricing_unit_cost": Decimal(10), # type: ignore } ] } From cc6470fd1f22ef85f25b62404cf6dc0392af2f57 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 15:12:09 +0100 Subject: [PATCH 051/121] fixing --- .../src/simcore_service_director_v2/constants.py | 3 +-- .../src/simcore_service_director_v2/core/errors.py | 5 +---- .../test_modules_dynamic_sidecar_client_api_public.py | 11 +++++++---- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/constants.py b/services/director-v2/src/simcore_service_director_v2/constants.py index fc700254ed0..b84865745df 100644 --- a/services/director-v2/src/simcore_service_director_v2/constants.py +++ b/services/director-v2/src/simcore_service_director_v2/constants.py @@ -1,5 +1,4 @@ from typing import Final - # dynamic services DYNAMIC_SIDECAR_SERVICE_PREFIX: Final[str] = "dy-sidecar" @@ -14,7 +13,7 @@ # - itisfoundation # - 10.0.0.0:8473 (IP & Port) DYNAMIC_SIDECAR_DOCKER_IMAGE_RE = ( 
- r"(^([_a-zA-Z0-9:.-]+)/)?(dynamic-sidecar):([_a-zA-Z0-9.-]+$)" + r"^(([_a-zA-Z0-9:.-]+)/)?(dynamic-sidecar):([_a-zA-Z0-9.-]+)$" ) REGEX_DY_SERVICE_SIDECAR = rf"^{DYNAMIC_SIDECAR_SERVICE_PREFIX}_[a-zA-Z0-9-_]*" diff --git a/services/director-v2/src/simcore_service_director_v2/core/errors.py b/services/director-v2/src/simcore_service_director_v2/core/errors.py index dac383a10e3..b42432fb118 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/core/errors.py @@ -28,9 +28,6 @@ class DirectorError(Exception): """Basic exception""" - def message(self) -> str: - return f"{self.args[0]}" - class ConfigurationError(DirectorError): """An error in the director-v2 configuration""" @@ -161,7 +158,7 @@ def get_errors(self) -> list[ErrorDict]: f"{self.project_id}", f"{self.node_id}", ), - "msg": self.message(), + "msg": f"{self.args[0]}", "type": self.code, }, ] diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py index f003acf6ad0..c748fc1cd1b 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py @@ -4,6 +4,9 @@ from contextlib import contextmanager from typing import Any, AsyncIterable, Callable, Iterator from unittest.mock import AsyncMock +from models_library.api_schemas_dynamic_sidecar.containers import ( + ActivityInfoOrNone +) import pytest from common_library.json_serialization import json_dumps @@ -352,21 +355,21 @@ async def test_update_volume_state( @pytest.mark.parametrize( - "mock_json", + "mock_dict", [{"seconds_inactive": 1}, {"seconds_inactive": 0}, None], ) async def test_get_service_activity( get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl, - mock_json: dict[str, Any], + mock_dict: dict[str, Any], ) -> 
None: with get_patched_client( "get_containers_activity", return_value=Response( - status_code=status.HTTP_200_OK, text=json_dumps(mock_json) + status_code=status.HTTP_200_OK, text=json_dumps(mock_dict) ), ) as client: - assert await client.get_service_activity(dynamic_sidecar_endpoint) == mock_json + assert await client.get_service_activity(dynamic_sidecar_endpoint) == TypeAdapter(ActivityInfoOrNone).validate_python(mock_dict) async def test_free_reserved_disk_space( From b68ffc0ee3fe7bf1a533cd4ca147376643158088 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 15:28:58 +0100 Subject: [PATCH 052/121] fix mypy --- .../api/errors/http_error.py | 6 +++-- .../api/errors/validation_error.py | 4 ++- .../core/errors.py | 25 +++++++++---------- .../modules/dynamic_sidecar/errors.py | 2 +- 4 files changed, 20 insertions(+), 17 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py b/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py index 2026774d7a5..5edfb25aa20 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py +++ b/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py @@ -6,7 +6,9 @@ from starlette.responses import JSONResponse -async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse: +async def http_error_handler(_: Request, exc: Exception) -> JSONResponse: + assert isinstance(exc, HTTPException) + return JSONResponse( content=jsonable_encoder({"errors": [exc.detail]}), status_code=exc.status_code ) @@ -22,7 +24,7 @@ def make_http_error_handler_for_exception( SEE https://docs.python.org/3/library/exceptions.html#concrete-exceptions """ - async def _http_error_handler(_: Request, exc: type[BaseException]) -> JSONResponse: + async def _http_error_handler(_: Request, exc: Exception) -> JSONResponse: assert isinstance(exc, exception_cls) # nosec return JSONResponse( 
content=jsonable_encoder({"errors": [str(exc)]}), status_code=status_code diff --git a/services/director-v2/src/simcore_service_director_v2/api/errors/validation_error.py b/services/director-v2/src/simcore_service_director_v2/api/errors/validation_error.py index fb70f6791ac..b3509cbbec9 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/errors/validation_error.py +++ b/services/director-v2/src/simcore_service_director_v2/api/errors/validation_error.py @@ -12,8 +12,10 @@ async def http422_error_handler( _: Request, - exc: Union[RequestValidationError, ValidationError], + exc: Exception, ) -> JSONResponse: + assert isinstance(exc, RequestValidationError | ValidationError) + return JSONResponse( content=jsonable_encoder({"errors": exc.errors()}), status_code=HTTP_422_UNPROCESSABLE_ENTITY, diff --git a/services/director-v2/src/simcore_service_director_v2/core/errors.py b/services/director-v2/src/simcore_service_director_v2/core/errors.py index b42432fb118..d5b9f0d2183 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/core/errors.py @@ -129,8 +129,6 @@ class WalletNotEnoughCreditsError(OsparcErrorMixin, DirectorError): class SchedulerError(DirectorError): - code = "scheduler_error" - def __init__(self, msg: str | None = None): super().__init__(msg or "Unexpected error in the scheduler") @@ -144,6 +142,7 @@ def __init__(self, pipeline_id: str, msg: str | None = None): class TaskSchedulingError(SchedulerError): """A task cannot be scheduled""" + code: str = "task scheduler error" def __init__(self, project_id: ProjectID, node_id: NodeID, msg: str | None = None): super().__init__(msg=msg) @@ -225,12 +224,12 @@ def get_errors(self) -> list[ErrorDict]: class ComputationalSchedulerChangedError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.scheduler_changed" + code = "computational_backend.scheduler_changed" # type: ignore msg_template = "The dask 
scheduler ID changed from '{original_scheduler_id}' to '{current_scheduler_id}'" class ComputationalBackendNotConnectedError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.not_connected" + code = "computational_backend.not_connected" # type: ignore msg_template = "The dask computational backend is not connected" @@ -239,24 +238,24 @@ class ComputationalBackendNoS3AccessError(OsparcErrorMixin, SchedulerError): class ComputationalBackendTaskNotFoundError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.task_not_found" + code = "computational_backend.task_not_found" # type: ignore msg_template = ( "The dask computational backend does not know about the task '{job_id}'" ) class ComputationalBackendTaskResultsNotReadyError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.task_result_not_ready" + code = "computational_backend.task_result_not_ready" # type: ignore msg_template = "The task result is not ready yet for job '{job_id}'" class ClustersKeeperNotAvailableError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.clusters_keeper_not_available" + code = "computational_backend.clusters_keeper_not_available" # type: ignore msg_template = "clusters-keeper service is not available!" class ComputationalBackendOnDemandNotReadyError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.on_demand_cluster.not_ready" + code = "computational_backend.on_demand_cluster.not_ready" # type: ignore msg_template = ( "The on demand computational cluster is not ready 'est. 
remaining time: {eta}'" ) @@ -266,7 +265,7 @@ class ComputationalBackendOnDemandNotReadyError(OsparcErrorMixin, SchedulerError # SCHEDULER/CLUSTER ERRORS # class ClusterNotFoundError(OsparcErrorMixin, SchedulerError): - code = "cluster.not_found" + code = "cluster.not_found" # type: ignore msg_template = "The cluster '{cluster_id}' not found" @@ -284,24 +283,24 @@ class ClusterInvalidOperationError(OsparcErrorMixin, SchedulerError): class DaskClientRequestError(OsparcErrorMixin, SchedulerError): - code = "dask_client.request.error" + code = "dask_client.request.error" # type: ignore msg_template = ( "The dask client to cluster on '{endpoint}' did an invalid request '{error}'" ) class DaskClusterError(OsparcErrorMixin, SchedulerError): - code = "cluster.error" + code = "cluster.error" # type: ignore msg_template = "The dask cluster on '{endpoint}' encountered an error: '{error}'" class DaskGatewayServerError(OsparcErrorMixin, SchedulerError): - code = "gateway.error" + code = "gateway.error" # type: ignore msg_template = "The dask gateway on '{endpoint}' encountered an error: '{error}'" class DaskClientAcquisisitonError(OsparcErrorMixin, SchedulerError): - code = "dask_client.acquisition.error" + code = "dask_client.acquisition.error" # type: ignore msg_template = ( "The dask client to cluster '{cluster}' encountered an error '{error}'" ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py index 62c38278e1c..8b40a4e0f35 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py @@ -40,5 +40,5 @@ class LegacyServiceIsNotSupportedError(DirectorError): class UnexpectedContainerStatusError(OsparcErrorMixin, DynamicSidecarError): - code = "dynamic_sidecar.container_status" + code = 
"dynamic_sidecar.container_status" # type: ignore msg_template = "Unexpected status from containers: {containers_with_error}" From 820dcfa3b2cbee53aa8024e2ea36d9c6d1c882fb Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 7 Nov 2024 16:00:05 +0100 Subject: [PATCH 053/121] fix version pattern --- .../tests/unit/with_dbs/test_api_route_computations.py | 1 - 1 file changed, 1 deletion(-) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 156b4c5b881..11fd2ded27a 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -142,7 +142,6 @@ def mocked_director_service_fcts( assert_all_called=False, assert_all_mocked=True, ) as respx_mock: - assert VersionStr.regex respx_mock.get( re.compile( r"/services/simcore%2Fservices%2F(comp|dynamic|frontend)%2F[^/]+/\d+.\d+.\d+$" From 6e06e45885b9859e3d9668304dbefe45f1488248 Mon Sep 17 00:00:00 2001 From: Andrei Neagu Date: Fri, 8 Nov 2024 09:49:06 +0100 Subject: [PATCH 054/121] regenerate docker models --- .../generated_models/docker_rest_api.py | 499 +++++++++++++++++- .../openapi-pydantic-models-generator.bash | 2 +- 2 files changed, 497 insertions(+), 4 deletions(-) diff --git a/packages/models-library/src/models_library/generated_models/docker_rest_api.py b/packages/models-library/src/models_library/generated_models/docker_rest_api.py index 9083641e020..83f88080da5 100644 --- a/packages/models-library/src/models_library/generated_models/docker_rest_api.py +++ b/packages/models-library/src/models_library/generated_models/docker_rest_api.py @@ -1,16 +1,20 @@ # generated by datamodel-codegen: # filename: https://docs.docker.com/reference/api/engine/version/v1.41.yaml -# timestamp: 2024-10-15T11:03:37+00:00 +# timestamp: 2024-11-08T08:47:46+00:00 from __future__ import annotations +from datetime import 
datetime from enum import Enum from typing import Any -from pydantic import AwareDatetime, BaseModel, Field, RootModel +from pydantic import BaseModel, ConfigDict, Field, RootModel class Model(RootModel[Any]): + model_config = ConfigDict( + populate_by_name=True, + ) root: Any @@ -25,6 +29,9 @@ class Port(BaseModel): An open port on a container """ + model_config = ConfigDict( + populate_by_name=True, + ) ip: str | None = Field( default=None, alias="IP", @@ -63,6 +70,9 @@ class MountPoint(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) type: Type1 | None = Field( default=None, alias="Type", @@ -118,6 +128,9 @@ class DeviceMapping(BaseModel): A device mapping between the host and container """ + model_config = ConfigDict( + populate_by_name=True, + ) path_on_host: str | None = Field(default=None, alias="PathOnHost") path_in_container: str | None = Field(default=None, alias="PathInContainer") cgroup_permissions: str | None = Field(default=None, alias="CgroupPermissions") @@ -128,6 +141,9 @@ class DeviceRequest(BaseModel): A request for devices to be sent to device drivers """ + model_config = ConfigDict( + populate_by_name=True, + ) driver: str | None = Field(default=None, alias="Driver", examples=["nvidia"]) count: int | None = Field(default=None, alias="Count", examples=[-1]) device_i_ds: list[str] | None = Field( @@ -149,6 +165,9 @@ class DeviceRequest(BaseModel): class ThrottleDevice(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) path: str | None = Field(default=None, alias="Path", description="Device path") rate: int | None = Field(default=None, alias="Rate", description="Rate", ge=0) @@ -188,6 +207,9 @@ class BindOptions(BaseModel): Optional configuration for the `bind` type. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) propagation: Propagation | None = Field( default=None, alias="Propagation", @@ -203,6 +225,9 @@ class DriverConfig(BaseModel): Map of driver specific options """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -220,6 +245,9 @@ class VolumeOptions(BaseModel): Optional configuration for the `volume` type. """ + model_config = ConfigDict( + populate_by_name=True, + ) no_copy: bool | None = Field( default=False, alias="NoCopy", @@ -238,6 +266,9 @@ class TmpfsOptions(BaseModel): Optional configuration for the `tmpfs` type. """ + model_config = ConfigDict( + populate_by_name=True, + ) size_bytes: int | None = Field( default=None, alias="SizeBytes", @@ -251,6 +282,9 @@ class TmpfsOptions(BaseModel): class Mount(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) target: str | None = Field( default=None, alias="Target", description="Container path." ) @@ -318,6 +352,9 @@ class RestartPolicy(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) name: Name | None = Field( default=None, alias="Name", @@ -331,11 +368,17 @@ class RestartPolicy(BaseModel): class BlkioWeightDeviceItem(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) path: str | None = Field(default=None, alias="Path") weight: int | None = Field(default=None, alias="Weight", ge=0) class Ulimit(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field(default=None, alias="Name", description="Name of ulimit") soft: int | None = Field(default=None, alias="Soft", description="Soft limit") hard: int | None = Field(default=None, alias="Hard", description="Hard limit") @@ -346,6 +389,9 @@ class Resources(BaseModel): A container's resources (cgroups config, ulimits, etc) """ + model_config = ConfigDict( + populate_by_name=True, + ) cpu_shares: int | None = Field( default=None, alias="CpuShares", @@ 
-518,6 +564,9 @@ class Limit(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) nano_cp_us: int | None = Field( default=None, alias="NanoCPUs", examples=[4000000000] ) @@ -533,16 +582,25 @@ class Limit(BaseModel): class NamedResourceSpec(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) kind: str | None = Field(default=None, alias="Kind") value: str | None = Field(default=None, alias="Value") class DiscreteResourceSpec(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) kind: str | None = Field(default=None, alias="Kind") value: int | None = Field(default=None, alias="Value") class GenericResource(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) named_resource_spec: NamedResourceSpec | None = Field( default=None, alias="NamedResourceSpec" ) @@ -558,6 +616,9 @@ class GenericResources(RootModel[list[GenericResource]]): """ + model_config = ConfigDict( + populate_by_name=True, + ) root: list[GenericResource] = Field( ..., description="User-defined resources can be either Integer resources (e.g, `SSD=3`) or\nString resources (e.g, `GPU=UUID1`).\n", @@ -576,6 +637,9 @@ class HealthConfig(BaseModel): A test to perform to check that the container is healthy. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) test: list[str] | None = Field( default=None, alias="Test", @@ -626,7 +690,10 @@ class HealthcheckResult(BaseModel): """ - start: AwareDatetime | None = Field( + model_config = ConfigDict( + populate_by_name=True, + ) + start: datetime | None = Field( default=None, alias="Start", description="Date and time at which this check started in\n[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format with nano-seconds.\n", @@ -666,6 +733,9 @@ class LogConfig(BaseModel): The logging configuration for this container """ + model_config = ConfigDict( + populate_by_name=True, + ) type: Type3 | None = Field(default=None, alias="Type") config: dict[str, str] | None = Field(default=None, alias="Config") @@ -687,6 +757,9 @@ class CgroupnsMode(str, Enum): class ConsoleSizeItem(RootModel[int]): + model_config = ConfigDict( + populate_by_name=True, + ) root: int = Field(..., ge=0) @@ -714,6 +787,9 @@ class ContainerConfig(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) hostname: str | None = Field( default=None, alias="Hostname", @@ -853,6 +929,9 @@ class ImageConfig(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) hostname: str | None = Field( default=None, alias="Hostname", @@ -1011,6 +1090,9 @@ class Address(BaseModel): Address represents an IPv4 or IPv6 IP address. """ + model_config = ConfigDict( + populate_by_name=True, + ) addr: str | None = Field(default=None, alias="Addr", description="IP address.") prefix_len: int | None = Field( default=None, alias="PrefixLen", description="Mask length of the IP address." 
@@ -1024,6 +1106,9 @@ class PortBinding(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) host_ip: str | None = Field( default=None, alias="HostIp", @@ -1045,6 +1130,9 @@ class GraphDriverData(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str = Field( ..., alias="Name", @@ -1071,6 +1159,9 @@ class RootFs(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) type: str = Field(..., alias="Type", examples=["layers"]) layers: list[str] | None = Field( default=None, @@ -1091,6 +1182,9 @@ class Metadata(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) last_tag_time: str | None = Field( default=None, alias="LastTagTime", @@ -1105,6 +1199,9 @@ class ImageInspect(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field( default=None, alias="Id", @@ -1227,6 +1324,9 @@ class ImageInspect(BaseModel): class ImageSummary(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str = Field( ..., alias="Id", @@ -1309,6 +1409,9 @@ class ImageSummary(BaseModel): class AuthConfig(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) username: str | None = None password: str | None = None email: str | None = None @@ -1316,6 +1419,9 @@ class AuthConfig(BaseModel): class ProcessConfig(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) privileged: bool | None = None user: str | None = None tty: bool | None = None @@ -1341,6 +1447,9 @@ class UsageData(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) size: int = Field( ..., alias="Size", @@ -1354,6 +1463,9 @@ class UsageData(BaseModel): class Volume(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str = Field( ..., alias="Name", description="Name of the volume.", examples=["tardis"] ) @@ -1416,6 +1528,9 @@ class VolumeConfig(BaseModel): Volume configuration """ + model_config = ConfigDict( + 
populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -1452,6 +1567,9 @@ class VolumeListResponse(BaseModel): Volume list response """ + model_config = ConfigDict( + populate_by_name=True, + ) volumes: list[Volume] | None = Field( default=None, alias="Volumes", description="List of volumes" ) @@ -1470,6 +1588,9 @@ class ConfigReference(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) network: str | None = Field( default=None, alias="Network", @@ -1479,6 +1600,9 @@ class ConfigReference(BaseModel): class IPAMConfig(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) subnet: str | None = Field(default=None, alias="Subnet", examples=["172.20.0.0/16"]) ip_range: str | None = Field( default=None, alias="IPRange", examples=["172.20.10.0/24"] @@ -1492,6 +1616,9 @@ class IPAMConfig(BaseModel): class NetworkContainer(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field(default=None, alias="Name", examples=["container_1"]) endpoint_id: str | None = Field( default=None, @@ -1513,6 +1640,9 @@ class PeerInfo(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -1547,6 +1677,9 @@ class BuildCache(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field( default=None, alias="ID", @@ -1611,15 +1744,24 @@ class ImageID(BaseModel): Image ID or Digest """ + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field(default=None, alias="ID") class ErrorDetail(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) code: int | None = None message: str | None = None class ProgressDetail(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) current: int | None = None total: int | None = None @@ -1629,6 +1771,9 @@ class ErrorResponse(BaseModel): Represents an error. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) message: str = Field(..., description="The error message.") @@ -1637,6 +1782,9 @@ class IdResponse(BaseModel): Response to an API call that returns just an Id """ + model_config = ConfigDict( + populate_by_name=True, + ) id: str = Field(..., alias="Id", description="The id of the newly created object.") @@ -1646,6 +1794,9 @@ class EndpointIPAMConfig(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) i_pv4_address: str | None = Field( default=None, alias="IPv4Address", examples=["172.20.30.33"] ) @@ -1658,6 +1809,9 @@ class EndpointIPAMConfig(BaseModel): class PluginMount(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str = Field(..., alias="Name", examples=["some-mount"]) description: str = Field( ..., @@ -1672,6 +1826,9 @@ class PluginMount(BaseModel): class PluginDevice(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str = Field(..., alias="Name") description: str = Field(..., alias="Description") settable: list[str] = Field(..., alias="Settable") @@ -1679,6 +1836,9 @@ class PluginDevice(BaseModel): class PluginEnv(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str = Field(..., alias="Name") description: str = Field(..., alias="Description") settable: list[str] = Field(..., alias="Settable") @@ -1686,6 +1846,9 @@ class PluginEnv(BaseModel): class PluginInterfaceType(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) prefix: str = Field(..., alias="Prefix") capability: str = Field(..., alias="Capability") version: str = Field(..., alias="Version") @@ -1698,6 +1861,9 @@ class PluginPrivilege(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field(default=None, alias="Name", examples=["network"]) description: str | None = Field(default=None, alias="Description") value: list[str] | None = Field(default=None, alias="Value", 
examples=[["host"]]) @@ -1708,6 +1874,9 @@ class Settings(BaseModel): Settings that can be modified by users. """ + model_config = ConfigDict( + populate_by_name=True, + ) mounts: list[PluginMount] = Field(..., alias="Mounts") env: list[str] = Field(..., alias="Env", examples=[["DEBUG=0"]]) args: list[str] = Field(..., alias="Args") @@ -1728,6 +1897,9 @@ class Interface(BaseModel): The interface between Docker and the plugin """ + model_config = ConfigDict( + populate_by_name=True, + ) types: list[PluginInterfaceType] = Field( ..., alias="Types", examples=[["docker.volumedriver/1.0"]] ) @@ -1741,15 +1913,24 @@ class Interface(BaseModel): class User(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) uid: int | None = Field(default=None, alias="UID", examples=[1000]) gid: int | None = Field(default=None, alias="GID", examples=[1000]) class Network1(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) type: str = Field(..., alias="Type", examples=["host"]) class Linux(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) capabilities: list[str] = Field( ..., alias="Capabilities", examples=[["CAP_SYS_ADMIN", "CAP_SYSLOG"]] ) @@ -1758,6 +1939,9 @@ class Linux(BaseModel): class Args(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str = Field(..., alias="Name", examples=["args"]) description: str = Field( ..., alias="Description", examples=["command line arguments"] @@ -1767,6 +1951,9 @@ class Args(BaseModel): class Rootfs(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) type: str | None = Field(default=None, examples=["layers"]) diff_ids: list[str] | None = Field( default=None, @@ -1784,6 +1971,9 @@ class Config(BaseModel): The config of a plugin. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) docker_version: str | None = Field( default=None, alias="DockerVersion", @@ -1839,6 +2029,9 @@ class Plugin(BaseModel): A plugin for the Engine API """ + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field( default=None, alias="Id", @@ -1878,6 +2071,9 @@ class ObjectVersion(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) index: int | None = Field(default=None, alias="Index", examples=[373531]) @@ -1901,6 +2097,9 @@ class Availability(str, Enum): class NodeSpec(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -1930,6 +2129,9 @@ class Platform(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) architecture: str | None = Field( default=None, alias="Architecture", @@ -1945,6 +2147,9 @@ class Platform(BaseModel): class Plugin1(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) type: str | None = Field(default=None, alias="Type") name: str | None = Field(default=None, alias="Name") @@ -1954,6 +2159,9 @@ class EngineDescription(BaseModel): EngineDescription provides information about an engine. """ + model_config = ConfigDict( + populate_by_name=True, + ) engine_version: str | None = Field( default=None, alias="EngineVersion", examples=["17.06.0"] ) @@ -1994,6 +2202,9 @@ class TLSInfo(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) trust_root: str | None = Field( default=None, alias="TrustRoot", @@ -2037,6 +2248,9 @@ class Orchestration(BaseModel): Orchestration configuration. """ + model_config = ConfigDict( + populate_by_name=True, + ) task_history_retention_limit: int | None = Field( default=None, alias="TaskHistoryRetentionLimit", @@ -2050,6 +2264,9 @@ class Raft(BaseModel): Raft configuration. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) snapshot_interval: int | None = Field( default=None, alias="SnapshotInterval", @@ -2086,6 +2303,9 @@ class Dispatcher(BaseModel): Dispatcher configuration. """ + model_config = ConfigDict( + populate_by_name=True, + ) heartbeat_period: int | None = Field( default=None, alias="HeartbeatPeriod", @@ -2105,6 +2325,9 @@ class Protocol(str, Enum): class ExternalCA(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) protocol: Protocol | None = Field( default=Protocol.cfssl, alias="Protocol", @@ -2132,6 +2355,9 @@ class CaConfig(BaseModel): CA configuration. """ + model_config = ConfigDict( + populate_by_name=True, + ) node_cert_expiry: int | None = Field( default=None, alias="NodeCertExpiry", @@ -2165,6 +2391,9 @@ class EncryptionConfig(BaseModel): Parameters related to encryption-at-rest. """ + model_config = ConfigDict( + populate_by_name=True, + ) auto_lock_managers: bool | None = Field( default=None, alias="AutoLockManagers", @@ -2183,6 +2412,9 @@ class LogDriver(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -2202,6 +2434,9 @@ class TaskDefaults(BaseModel): Defaults for creating tasks in this cluster. """ + model_config = ConfigDict( + populate_by_name=True, + ) log_driver: LogDriver | None = Field( default=None, alias="LogDriver", @@ -2214,6 +2449,9 @@ class SwarmSpec(BaseModel): User modifiable swarm configuration. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -2262,6 +2500,9 @@ class ClusterInfo(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field( default=None, alias="ID", @@ -2315,6 +2556,9 @@ class JoinTokens(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) worker: str | None = Field( default=None, alias="Worker", @@ -2334,6 +2578,9 @@ class JoinTokens(BaseModel): class Swarm(ClusterInfo): + model_config = ConfigDict( + populate_by_name=True, + ) join_tokens: JoinTokens | None = Field(default=None, alias="JoinTokens") @@ -2350,6 +2597,9 @@ class PluginSpec(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -2371,6 +2621,9 @@ class CredentialSpec(BaseModel): CredentialSpec for managed service account (Windows only) """ + model_config = ConfigDict( + populate_by_name=True, + ) config: str | None = Field( default=None, alias="Config", @@ -2395,6 +2648,9 @@ class SeLinuxContext(BaseModel): SELinux labels of the container """ + model_config = ConfigDict( + populate_by_name=True, + ) disable: bool | None = Field( default=None, alias="Disable", description="Disable SELinux" ) @@ -2417,6 +2673,9 @@ class Privileges(BaseModel): Security options for the container """ + model_config = ConfigDict( + populate_by_name=True, + ) credential_spec: CredentialSpec | None = Field( default=None, alias="CredentialSpec", @@ -2436,6 +2695,9 @@ class DnsConfig(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) nameservers: list[str] | None = Field( default=None, alias="Nameservers", @@ -2457,6 +2719,9 @@ class File(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -2476,6 +2741,9 @@ class File(BaseModel): class Secret(BaseModel): + model_config = ConfigDict( + 
populate_by_name=True, + ) file: File | None = Field( default=None, alias="File", @@ -2503,6 +2771,9 @@ class File1(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -2522,6 +2793,9 @@ class File1(BaseModel): class Config1(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) file: File1 | None = Field( default=None, alias="File", @@ -2569,6 +2843,9 @@ class ContainerSpec(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) image: str | None = Field( default=None, alias="Image", @@ -2706,6 +2983,9 @@ class NetworkAttachmentSpec(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) container_id: str | None = Field( default=None, alias="ContainerID", @@ -2730,6 +3010,9 @@ class RestartPolicy1(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) condition: Condition | None = Field( default=None, alias="Condition", description="Condition for restart." 
) @@ -2749,6 +3032,9 @@ class RestartPolicy1(BaseModel): class Spread(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) spread_descriptor: str | None = Field( default=None, alias="SpreadDescriptor", @@ -2757,10 +3043,16 @@ class Spread(BaseModel): class Preference(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) spread: Spread | None = Field(default=None, alias="Spread") class Placement(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) constraints: list[str] | None = Field( default=None, alias="Constraints", @@ -2806,6 +3098,9 @@ class LogDriver1(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field(default=None, alias="Name") options: dict[str, str] | None = Field(default=None, alias="Options") @@ -2829,12 +3124,18 @@ class TaskState(str, Enum): class ContainerStatus(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) container_id: str | None = Field(default=None, alias="ContainerID") pid: int | None = Field(default=None, alias="PID") exit_code: int | None = Field(default=None, alias="ExitCode") class Status1(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) timestamp: str | None = Field(default=None, alias="Timestamp") state: TaskState | None = Field(default=None, alias="State") message: str | None = Field(default=None, alias="Message") @@ -2845,6 +3146,9 @@ class Status1(BaseModel): class Replicated(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) replicas: int | None = Field(default=None, alias="Replicas") @@ -2855,6 +3159,9 @@ class ReplicatedJob(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) max_concurrent: int | None = Field( default=1, alias="MaxConcurrent", @@ -2872,6 +3179,9 @@ class Mode(BaseModel): Scheduling mode for the service. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) replicated: Replicated | None = Field(default=None, alias="Replicated") global_: dict[str, Any] | None = Field(default=None, alias="Global") replicated_job: ReplicatedJob | None = Field( @@ -2915,6 +3225,9 @@ class UpdateConfig(BaseModel): Specification for the update strategy of the service. """ + model_config = ConfigDict( + populate_by_name=True, + ) parallelism: int | None = Field( default=None, alias="Parallelism", @@ -2975,6 +3288,9 @@ class RollbackConfig(BaseModel): Specification for the rollback strategy of the service. """ + model_config = ConfigDict( + populate_by_name=True, + ) parallelism: int | None = Field( default=None, alias="Parallelism", @@ -3026,6 +3342,9 @@ class PublishMode(str, Enum): class EndpointPortConfig(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field(default=None, alias="Name") protocol: Type | None = Field(default=None, alias="Protocol") target_port: int | None = Field( @@ -3057,6 +3376,9 @@ class EndpointSpec(BaseModel): Properties that can be configured to access and load balance a service. """ + model_config = ConfigDict( + populate_by_name=True, + ) mode: Mode1 | None = Field( default=Mode1.vip, alias="Mode", @@ -3070,11 +3392,17 @@ class EndpointSpec(BaseModel): class VirtualIP(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) network_id: str | None = Field(default=None, alias="NetworkID") addr: str | None = Field(default=None, alias="Addr") class Endpoint(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) spec: EndpointSpec | None = Field(default=None, alias="Spec") ports: list[EndpointPortConfig] | None = Field(default=None, alias="Ports") virtual_i_ps: list[VirtualIP] | None = Field(default=None, alias="VirtualIPs") @@ -3091,6 +3419,9 @@ class UpdateStatus(BaseModel): The status of a service update. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) state: State | None = Field(default=None, alias="State") started_at: str | None = Field(default=None, alias="StartedAt") completed_at: str | None = Field(default=None, alias="CompletedAt") @@ -3104,6 +3435,9 @@ class ServiceStatus(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) running_tasks: int | None = Field( default=None, alias="RunningTasks", @@ -3132,6 +3466,9 @@ class JobStatus(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) job_iteration: ObjectVersion | None = Field( default=None, alias="JobIteration", @@ -3145,6 +3482,9 @@ class JobStatus(BaseModel): class ImageDeleteResponseItem(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) untagged: str | None = Field( default=None, alias="Untagged", @@ -3158,12 +3498,18 @@ class ImageDeleteResponseItem(BaseModel): class ServiceUpdateResponse(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) warnings: list[str] | None = Field( default=None, alias="Warnings", description="Optional warning messages" ) class HostConfig1(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) network_mode: str | None = Field(default=None, alias="NetworkMode") @@ -3172,6 +3518,9 @@ class Driver(BaseModel): Driver represents a driver (network, logging, secrets). """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str = Field( ..., alias="Name", description="Name of the driver.", examples=["some-driver"] ) @@ -3189,6 +3538,9 @@ class Driver(BaseModel): class SecretSpec(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", description="User-defined name of the secret." 
) @@ -3222,6 +3574,9 @@ class SecretSpec(BaseModel): class Secret1(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field( default=None, alias="ID", examples=["blt1owaxmitz71s9v5zh81zun"] ) @@ -3236,6 +3591,9 @@ class Secret1(BaseModel): class ConfigSpec(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", description="User-defined name of the config." ) @@ -3255,6 +3613,9 @@ class ConfigSpec(BaseModel): class Config2(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field(default=None, alias="ID") version: ObjectVersion | None = Field(default=None, alias="Version") created_at: str | None = Field(default=None, alias="CreatedAt") @@ -3283,16 +3644,25 @@ class ContainerWaitExitError(BaseModel): container waiting error, if any """ + model_config = ConfigDict( + populate_by_name=True, + ) message: str | None = Field( default=None, alias="Message", description="Details of an error" ) class Platform1(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str = Field(..., alias="Name") class Component(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str = Field( ..., alias="Name", description="Name of the component\n", examples=["Engine"] ) @@ -3315,6 +3685,9 @@ class SystemVersion(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) platform: Platform1 | None = Field(default=None, alias="Platform") components: list[Component] | None = Field( default=None, @@ -3422,6 +3795,9 @@ class Isolation2(str, Enum): class DefaultAddressPool(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) base: str | None = Field( default=None, alias="Base", @@ -3445,6 +3821,9 @@ class PluginsInfo(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) volume: list[str] | None = Field( default=None, alias="Volume", @@ -3487,6 +3866,9 @@ class 
IndexInfo(BaseModel): IndexInfo contains information about a registry. """ + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -3530,6 +3912,9 @@ class Runtime(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) path: str | None = Field( default=None, description="Name and, optional, path, of the OCI executable binary.\n\nIf the path is omitted, the daemon searches the host's `$PATH` for the\nbinary and uses the first result.\n", @@ -3551,6 +3936,9 @@ class Commit(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field( default=None, alias="ID", @@ -3583,6 +3971,9 @@ class PeerNode(BaseModel): Represents a peer-node in the swarm """ + model_config = ConfigDict( + populate_by_name=True, + ) node_id: str | None = Field( default=None, alias="NodeID", @@ -3601,6 +3992,9 @@ class NetworkAttachmentConfig(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) target: str | None = Field( default=None, alias="Target", @@ -3625,6 +4019,9 @@ class EventActor(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field( default=None, alias="ID", @@ -3680,6 +4077,9 @@ class SystemEventsResponse(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) type: Type5 | None = Field( default=None, alias="Type", @@ -3715,6 +4115,9 @@ class OCIDescriptor(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) media_type: str | None = Field( default=None, alias="mediaType", @@ -3740,6 +4143,9 @@ class OCIPlatform(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) architecture: str | None = Field( default=None, description="The CPU architecture, for example `amd64` or `ppc64`.\n", @@ -3776,6 +4182,9 @@ class DistributionInspectResponse(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) descriptor: OCIDescriptor = Field(..., 
alias="Descriptor") platforms: list[OCIPlatform] = Field( ..., @@ -3791,6 +4200,9 @@ class ResourceObject(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) nano_cp_us: int | None = Field( default=None, alias="NanoCPUs", examples=[4000000000] ) @@ -3808,6 +4220,9 @@ class Health(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) status: Status | None = Field( default=None, alias="Status", @@ -3838,10 +4253,16 @@ class PortMap(RootModel[dict[str, list[PortBinding]] | None]): """ + model_config = ConfigDict( + populate_by_name=True, + ) root: dict[str, list[PortBinding]] | None = None class IPAM(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) driver: str | None = Field( default="default", alias="Driver", @@ -3862,6 +4283,9 @@ class IPAM(BaseModel): class BuildInfo(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = None stream: str | None = None error: str | None = None @@ -3873,6 +4297,9 @@ class BuildInfo(BaseModel): class CreateImageInfo(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = None error: str | None = None error_detail: ErrorDetail | None = Field(default=None, alias="errorDetail") @@ -3882,6 +4309,9 @@ class CreateImageInfo(BaseModel): class PushImageInfo(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) error: str | None = None status: str | None = None progress: str | None = None @@ -3893,6 +4323,9 @@ class EndpointSettings(BaseModel): Configuration for a network endpoint. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) ipam_config: EndpointIPAMConfig | None = Field(default=None, alias="IPAMConfig") links: list[str] | None = Field( default=None, alias="Links", examples=[["container_1", "container_2"]] @@ -3974,6 +4407,9 @@ class NodeDescription(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) hostname: str | None = Field( default=None, alias="Hostname", examples=["bf3067039e47"] ) @@ -3991,6 +4427,9 @@ class NodeStatus(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) state: NodeState | None = Field(default=None, alias="State") message: str | None = Field(default=None, alias="Message", examples=[""]) addr: str | None = Field( @@ -4010,6 +4449,9 @@ class ManagerStatus(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) leader: bool | None = Field(default=False, alias="Leader", examples=[True]) reachability: Reachability | None = Field(default=None, alias="Reachability") addr: str | None = Field( @@ -4027,6 +4469,9 @@ class Resources1(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) limits: Limit | None = Field( default=None, alias="Limits", description="Define resources limits." ) @@ -4040,6 +4485,9 @@ class TaskSpec(BaseModel): User modifiable task configuration. """ + model_config = ConfigDict( + populate_by_name=True, + ) plugin_spec: PluginSpec | None = Field( default=None, alias="PluginSpec", @@ -4089,6 +4537,9 @@ class TaskSpec(BaseModel): class Task(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field(default=None, alias="ID", description="The ID of the task.") version: ObjectVersion | None = Field(default=None, alias="Version") created_at: str | None = Field(default=None, alias="CreatedAt") @@ -4128,6 +4579,9 @@ class ServiceSpec(BaseModel): User modifiable configuration for a service. 
""" + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", description="Name of the service." ) @@ -4157,6 +4611,9 @@ class ServiceSpec(BaseModel): class Service(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field(default=None, alias="ID") version: ObjectVersion | None = Field(default=None, alias="Version") created_at: str | None = Field(default=None, alias="CreatedAt") @@ -4185,10 +4642,16 @@ class NetworkSettings1(BaseModel): A summary of the container's network settings """ + model_config = ConfigDict( + populate_by_name=True, + ) networks: dict[str, EndpointSettings] | None = Field(default=None, alias="Networks") class ContainerSummary(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field( default=None, alias="Id", description="The ID of this container" ) @@ -4257,6 +4720,9 @@ class ContainerState(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) status: Status2 | None = Field( default=None, alias="Status", @@ -4321,6 +4787,9 @@ class ContainerWaitResponse(BaseModel): OK response to ContainerWait operation """ + model_config = ConfigDict( + populate_by_name=True, + ) status_code: int = Field( ..., alias="StatusCode", description="Exit code of the container" ) @@ -4333,6 +4802,9 @@ class RegistryServiceConfig(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) allow_nondistributable_artifacts_cid_rs: list[str] | None = Field( default=None, alias="AllowNondistributableArtifactsCIDRs", @@ -4404,6 +4876,9 @@ class SwarmInfo(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) node_id: str | None = Field( default="", alias="NodeID", @@ -4453,6 +4928,9 @@ class HostConfig(Resources): Container configuration that depends on the host we are running on """ + model_config = ConfigDict( + populate_by_name=True, + ) binds: list[str] | None = Field( default=None, 
alias="Binds", @@ -4642,6 +5120,9 @@ class NetworkingConfig(BaseModel): """ + model_config = ConfigDict( + populate_by_name=True, + ) endpoints_config: dict[str, EndpointSettings] | None = Field( default=None, alias="EndpointsConfig", @@ -4654,6 +5135,9 @@ class NetworkSettings(BaseModel): NetworkSettings exposes the network settings in the API """ + model_config = ConfigDict( + populate_by_name=True, + ) bridge: str | None = Field( default=None, alias="Bridge", @@ -4753,6 +5237,9 @@ class NetworkSettings(BaseModel): class Network(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) name: str | None = Field( default=None, alias="Name", @@ -4864,6 +5351,9 @@ class Network(BaseModel): class Node(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field(default=None, alias="ID", examples=["24ifsmvkjbyhk"]) version: ObjectVersion | None = Field(default=None, alias="Version") created_at: str | None = Field( @@ -4885,6 +5375,9 @@ class Node(BaseModel): class SystemInfo(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) id: str | None = Field( default=None, alias="ID", diff --git a/scripts/openapi-pydantic-models-generator.bash b/scripts/openapi-pydantic-models-generator.bash index 788cb90e792..88e071a5273 100755 --- a/scripts/openapi-pydantic-models-generator.bash +++ b/scripts/openapi-pydantic-models-generator.bash @@ -1,5 +1,4 @@ #!/bin/bash -#!/bin/bash # http://redsymbol.net/articles/unofficial-bash-strict-mode/ set -o errexit set -o nounset @@ -27,6 +26,7 @@ ENTRYPOINT ["datamodel-codegen", \ "--use-standard-collections", \ "--use-union-operator", \ "--use-schema-description", \ + "--allow-population-by-field-name", \ "--use-subclass-enum", \ "--use-double-quotes", \ "--field-constraints", \ From 63be0435a897e347f5138ea0bfcc07c4fcacce8a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 10:08:04 +0100 Subject: [PATCH 055/121] fix nullable --- 
.../api_schemas_directorv2/clusters.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py index 3f86e18d30b..6f03cb4d5f5 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py @@ -168,14 +168,15 @@ def set_default_thumbnail_if_empty(cls, v, info: ValidationInfo): class ClusterPatch(BaseCluster): - name: str | None # type: ignore[assignment] - description: str | None - type: ClusterTypeInModel | None # type: ignore[assignment] - owner: GroupID | None # type: ignore[assignment] - thumbnail: HttpUrl | None - endpoint: AnyUrl | None # type: ignore[assignment] - authentication: ExternalClusterAuthentication | None # type: ignore[assignment] + name: str | None = Field(default=None) # type: ignore[assignment] + description: str | None = Field(default=None) + type: ClusterTypeInModel | None = Field(default=None) # type: ignore[assignment] + owner: GroupID | None = Field(default=None) # type: ignore[assignment] + thumbnail: HttpUrl | None = Field(default=None) + endpoint: AnyUrl | None = Field(default=None) # type: ignore[assignment] + authentication: ExternalClusterAuthentication | None = Field(default=None) # type: ignore[assignment] access_rights: dict[GroupID, ClusterAccessRights] | None = Field( # type: ignore[assignment] + default=None, alias="accessRights" ) From d629f956f3be9d1ce66fc1e49150925cf168942e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 10:34:39 +0100 Subject: [PATCH 056/121] fix --- .../tests/unit/with_dbs/test_api_route_computations.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py 
b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 11fd2ded27a..6f8de1fd238 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -147,7 +147,7 @@ def mocked_director_service_fcts( r"/services/simcore%2Fservices%2F(comp|dynamic|frontend)%2F[^/]+/\d+.\d+.\d+$" ), name="get_service", - ).respond(json={"data": [fake_service_details.model_dump(by_alias=True)]}) + ).respond(json={"data": [fake_service_details.model_dump(mode="json", by_alias=True)]}) respx_mock.get( re.compile( r"/services/simcore%2Fservices%2F(comp|dynamic|frontend)%2F[^/]+/\d+.\d+.\d+/labels" @@ -160,7 +160,7 @@ def mocked_director_service_fcts( r"/service_extras/(simcore)%2F(services)%2F(comp|dynamic|frontend)%2F.+/(.+)" ), name="get_service_extras", - ).respond(json={"data": fake_service_extras.model_dump(by_alias=True)}) + ).respond(json={"data": fake_service_extras.model_dump(mode="json", by_alias=True)}) yield respx_mock @@ -231,7 +231,7 @@ def _mocked_services_details( "key": urllib.parse.unquote(service_key), "version": service_version, "deprecated": ( - datetime.datetime.now(tz=datetime.timezone.utc) + datetime.datetime.now(tz=datetime.UTC) - datetime.timedelta(days=1) ).isoformat(), } @@ -468,6 +468,7 @@ def project_nodes_overrides(request: pytest.FixtureRequest) -> dict[str, Any]: return request.param +@pytest.mark.testit async def test_create_computation_with_wallet( minimal_configuration: None, mocked_director_service_fcts: respx.MockRouter, From dfcef2eb02796df2e5979fe0bea969acc8c80931 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 11:11:34 +0100 Subject: [PATCH 057/121] fix serializer --- .../tests/unit/with_dbs/test_api_route_clusters_details.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py 
b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py index e9394aba278..2029b8a4dd3 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py @@ -85,7 +85,7 @@ async def test_local_dask_gateway_server(local_dask_gateway_server: DaskGatewayS async with cluster.get_client() as client: print(f"--> created new client {client=}, submitting a job") - res = await client.submit(lambda x: x + 1, 1) # type: ignore + res = await client.submit(lambda x: x + 1, 1) assert res == 2 print(f"--> scaling cluster {cluster=} back to 0") @@ -155,7 +155,7 @@ async def test_get_cluster_details( authentication=SimpleAuthentication( username=gateway_username, password=SecretStr(local_dask_gateway_server.password), - ).dict(by_alias=True), + ).model_dump(mode="json", by_alias=True), ) # in its present state, the cluster should have no workers cluster_out = await _get_cluster_details( From 740886bcee5cda9b8a81e8b51b82f2eca0cce352 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 11:43:47 +0100 Subject: [PATCH 058/121] fix --- services/director-v2/tests/unit/with_dbs/conftest.py | 6 +++--- .../tests/unit/with_dbs/test_api_route_dynamic_services.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 78a17e79777..7b32af07c8c 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -92,7 +92,7 @@ def creator( "inputs": ( { key: ( - json.loads(value.json(by_alias=True, exclude_unset=True)) + value.model_dump(by_alias=True, exclude_unset=True) if isinstance(value, BaseModel) else value ) @@ -113,9 +113,9 @@ def creator( if node_data.outputs else {} ), - "image": Image(name=node_data.key, tag=node_data.version).dict( # type: ignore + "image": 
Image(name=node_data.key, tag=node_data.version).model_dump( by_alias=True, exclude_unset=True - ), # type: ignore + ), "node_class": to_node_class(node_data.key), "internal_id": internal_id + 1, "submit": datetime.datetime.now(tz=datetime.UTC), diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py index 58557b66801..5787fa119e1 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py @@ -191,7 +191,7 @@ async def mock_retrieve_features( ] = scheduler_data_from_http_request respx_mock.post( - f"{scheduler_data_from_http_request.endpoint}/v1/containers/ports/inputs:pull", + f"{scheduler_data_from_http_request.endpoint}v1/containers/ports/inputs:pull", name="service_pull_input_ports", ).respond(json="mocked_task_id", status_code=status.HTTP_202_ACCEPTED) From 3c40aedc627eb1bb3946a8c454ba7253228a4819 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 12:46:01 +0100 Subject: [PATCH 059/121] fix url --- .../simcore_service_director_v2/core/settings.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index 45a1c3b2f33..71bfb35ae13 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -6,13 +6,7 @@ from functools import cached_property from common_library.pydantic_validators import validate_numeric_string_as_timedelta -from models_library.basic_types import ( - BootModeEnum, - BuildTargetEnum, - LogLevel, - PortInt, - VersionTag, -) +from models_library.basic_types import LogLevel, PortInt, VersionTag from models_library.clusters import ( 
DEFAULT_CLUSTER_ID, Cluster, @@ -66,10 +60,10 @@ class DirectorV0Settings(BaseCustomSettings): def endpoint(self) -> str: url: str = str( AnyHttpUrl.build( - scheme='http', + scheme="http", host=self.DIRECTOR_HOST, port=self.DIRECTOR_PORT, - path=f"/{self.DIRECTOR_V0_VTAG}", + path=f"{self.DIRECTOR_V0_VTAG}", ) ) return url @@ -244,8 +238,7 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings): description="settings for the private registry deployed with the platform", ) DIRECTOR_V2_DOCKER_HUB_REGISTRY: RegistrySettings | None = Field( - default=None, - description="public DockerHub registry settings" + default=None, description="public DockerHub registry settings" ) DIRECTOR_V2_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field( From 3f768a622dafd4ed71feb7fb29274010b21b224f Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 13:06:27 +0100 Subject: [PATCH 060/121] fix url --- .../simcore_service_director_v2/core/settings.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index 71bfb35ae13..49389832d18 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -58,15 +58,13 @@ class DirectorV0Settings(BaseCustomSettings): @cached_property def endpoint(self) -> str: - url: str = str( - AnyHttpUrl.build( - scheme="http", - host=self.DIRECTOR_HOST, - port=self.DIRECTOR_PORT, - path=f"{self.DIRECTOR_V0_VTAG}", - ) + url = AnyHttpUrl.build( # pylint: disable=no-member + scheme="http", + host=self.DIRECTOR_HOST, + port=self.DIRECTOR_PORT, + path=f"{self.DIRECTOR_V0_VTAG}", ) - return url + return f"{url}" class ComputationalBackendSettings(BaseCustomSettings): From 60010050aa425957aba023ecf5241174e2551ea5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: 
Fri, 8 Nov 2024 13:33:52 +0100 Subject: [PATCH 061/121] fix import --- services/director-v2/tests/unit/test_core_settings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director-v2/tests/unit/test_core_settings.py b/services/director-v2/tests/unit/test_core_settings.py index 4b1264cc754..2151d64cfa5 100644 --- a/services/director-v2/tests/unit/test_core_settings.py +++ b/services/director-v2/tests/unit/test_core_settings.py @@ -5,7 +5,7 @@ from typing import Any import pytest -from models_library.basic_types import LogLevel +from models_library.basic_types import BootModeEnum, LogLevel from pydantic import ValidationError from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.base import DefaultFromEnvFactoryError @@ -18,7 +18,7 @@ PlacementSettings, RCloneSettings, ) -from simcore_service_director_v2.core.settings import AppSettings, BootModeEnum +from simcore_service_director_v2.core.settings import AppSettings def _get_backend_type_options() -> set[str]: From 889167f7dc77e9cf442e0ad28f2ca717d3f1a18a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 14:18:57 +0100 Subject: [PATCH 062/121] fix url types --- .../simcore_service_director_v2/cli/_core.py | 12 ++-- .../models/dynamic_services_scheduler.py | 8 +-- .../dynamic_sidecar/api_client/_public.py | 56 +++++++++---------- .../dynamic_sidecar/api_client/_thin.py | 53 ++++++++---------- .../scheduler/_core/_scheduler.py | 4 +- 5 files changed, 61 insertions(+), 72 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py index bcee8a446e4..945cb790e39 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py @@ -12,7 +12,7 @@ from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceType from 
models_library.services_enums import ServiceBootType, ServiceState -from pydantic import AnyHttpUrl, BaseModel, PositiveInt, TypeAdapter +from pydantic import AnyHttpUrl, BaseModel, PositiveInt from rich.live import Live from rich.table import Table from servicelib.services_utils import get_service_from_key @@ -52,14 +52,14 @@ async def _initialized_app(only_db: bool = False) -> AsyncIterator[FastAPI]: ### PROJECT SAVE STATE -def _get_dynamic_sidecar_endpoint( - settings: AppSettings, node_id: NodeIDStr -) -> AnyHttpUrl: +def _get_dynamic_sidecar_endpoint(settings: AppSettings, node_id: NodeIDStr) -> str: dynamic_sidecar_names = DynamicSidecarNamesHelper.make(NodeID(node_id)) hostname = dynamic_sidecar_names.service_name_dynamic_sidecar port = settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_PORT - url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python(f"http://{hostname}:{port}") # NOSONAR - return url + url = AnyHttpUrl.build( # pylint: disable=no-member + scheme="http", host=hostname, port=port + ) + return f"{url}" async def _save_node_state( diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index 901a8cc5f26..f9f0133c586 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -389,12 +389,12 @@ class SchedulerData(CommonServiceDetails, DynamicSidecarServiceLabels): port: PortInt = Field(default=8000, description="dynamic-sidecar port") @property - def endpoint(self) -> AnyHttpUrl: + def endpoint(self) -> str: """endpoint where all the services are exposed""" - url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( - f"http://{self.hostname}:{self.port}" # NOSONAR + url = AnyHttpUrl.build( # pylint: disable=no-member + scheme="http", host=self.hostname, 
port=self.port ) - return url + return f"{url}" dynamic_sidecar: DynamicSidecar = Field( ..., diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py index 5945e07b8e3..c83b33c2299 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py @@ -81,7 +81,7 @@ def _dynamic_services_scheduler_settings(self) -> DynamicServicesSchedulerSettin return settings async def is_healthy( - self, dynamic_sidecar_endpoint: AnyHttpUrl, *, with_retry: bool = True + self, dynamic_sidecar_endpoint: str, *, with_retry: bool = True ) -> bool: """returns True if service is UP and running else False""" try: @@ -97,9 +97,7 @@ async def is_healthy( except BaseHttpClientError: return False - async def containers_inspect( - self, dynamic_sidecar_endpoint: AnyHttpUrl - ) -> dict[str, Any]: + async def containers_inspect(self, dynamic_sidecar_endpoint: str) -> dict[str, Any]: """ returns dict containing docker inspect result form all dynamic-sidecar started containers @@ -112,7 +110,7 @@ async def containers_inspect( @log_decorator(logger=_logger) async def containers_docker_status( - self, dynamic_sidecar_endpoint: AnyHttpUrl + self, dynamic_sidecar_endpoint: str ) -> dict[str, dict[str, str]]: try: response = await self._thin_client.get_containers( @@ -126,7 +124,7 @@ async def containers_docker_status( @log_decorator(logger=_logger) async def toggle_service_ports_io( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, *, enable_outputs: bool, enable_inputs: bool, @@ -139,7 +137,7 @@ async def toggle_service_ports_io( @log_decorator(logger=_logger) async def service_outputs_create_dirs( - self, dynamic_sidecar_endpoint: AnyHttpUrl, outputs_labels: dict[str, 
Any] + self, dynamic_sidecar_endpoint: str, outputs_labels: dict[str, Any] ) -> None: await self._thin_client.post_containers_ports_outputs_dirs( dynamic_sidecar_endpoint, outputs_labels=outputs_labels @@ -147,7 +145,7 @@ async def service_outputs_create_dirs( @log_decorator(logger=_logger) async def get_entrypoint_container_name( - self, dynamic_sidecar_endpoint: AnyHttpUrl, dynamic_sidecar_network_name: str + self, dynamic_sidecar_endpoint: str, dynamic_sidecar_network_name: str ) -> str: """ While this API raises EntrypointContainerNotFoundError @@ -171,7 +169,7 @@ async def get_entrypoint_container_name( async def _attach_container_to_network( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, container_id: str, network_id: str, network_aliases: list[str], @@ -185,7 +183,7 @@ async def _attach_container_to_network( ) async def _detach_container_from_network( - self, dynamic_sidecar_endpoint: AnyHttpUrl, container_id: str, network_id: str + self, dynamic_sidecar_endpoint: str, container_id: str, network_id: str ) -> None: """detaches a container from a network if not already detached""" await self._thin_client.post_containers_networks_detach( @@ -194,7 +192,7 @@ async def _detach_container_from_network( async def attach_service_containers_to_project_network( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, dynamic_sidecar_network_name: str, project_network: str, project_id: ProjectID, @@ -250,7 +248,7 @@ async def attach_service_containers_to_project_network( async def detach_service_containers_from_project_network( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, project_network: str, project_id: ProjectID, ) -> None: @@ -282,14 +280,14 @@ async def detach_service_containers_from_project_network( async def submit_docker_compose_spec( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, compose_spec: str, ) -> None: await 
self._thin_client.post_containers_compose_spec( dynamic_sidecar_endpoint, compose_spec=compose_spec ) - def _get_client(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> Client: + def _get_client(self, dynamic_sidecar_endpoint: str) -> Client: return Client( app=self._app, async_client=self._async_client, @@ -299,7 +297,7 @@ def _get_client(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> Client: async def _await_for_result( self, task_id: TaskId, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, task_timeout: PositiveFloat, progress_callback: ProgressCallback | None = None, ) -> Any | None: @@ -315,7 +313,7 @@ async def _await_for_result( async def create_containers( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, metrics_params: CreateServiceMetricsAdditionalParams, progress_callback: ProgressCallback | None = None, ) -> None: @@ -334,7 +332,7 @@ async def create_containers( async def stop_service( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, progress_callback: ProgressCallback | None = None, ) -> None: response = await self._thin_client.post_containers_tasks_down( @@ -349,7 +347,7 @@ async def stop_service( progress_callback, ) - async def restore_service_state(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> int: + async def restore_service_state(self, dynamic_sidecar_endpoint: str) -> int: response = await self._thin_client.post_containers_tasks_state_restore( dynamic_sidecar_endpoint ) @@ -364,9 +362,7 @@ async def restore_service_state(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> i assert isinstance(result, int) # nosec return result - async def pull_user_services_images( - self, dynamic_sidecar_endpoint: AnyHttpUrl - ) -> None: + async def pull_user_services_images(self, dynamic_sidecar_endpoint: str) -> None: response = await self._thin_client.post_containers_images_pull( dynamic_sidecar_endpoint ) @@ -381,7 +377,7 @@ async def pull_user_services_images( async def 
save_service_state( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, progress_callback: ProgressCallback | None = None, ) -> int: response = await self._thin_client.post_containers_tasks_state_save( @@ -400,7 +396,7 @@ async def save_service_state( async def pull_service_input_ports( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, port_keys: list[ServicePortKey] | None = None, ) -> int: response = await self._thin_client.post_containers_tasks_ports_inputs_pull( @@ -418,7 +414,7 @@ async def pull_service_input_ports( async def pull_service_output_ports( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, port_keys: list[str] | None = None, ) -> int: response = await self._thin_client.post_containers_tasks_ports_outputs_pull( @@ -437,7 +433,7 @@ async def pull_service_output_ports( async def push_service_output_ports( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, progress_callback: ProgressCallback | None = None, ) -> None: response = await self._thin_client.post_containers_tasks_ports_outputs_push( @@ -452,7 +448,7 @@ async def push_service_output_ports( progress_callback, ) - async def restart_containers(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> None: + async def restart_containers(self, dynamic_sidecar_endpoint: str) -> None: response = await self._thin_client.post_containers_tasks_restart( dynamic_sidecar_endpoint ) @@ -467,7 +463,7 @@ async def restart_containers(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> None async def update_volume_state( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, volume_category: VolumeCategory, volume_status: VolumeStatus, ) -> None: @@ -489,7 +485,7 @@ async def configure_proxy( await self._thin_client.proxy_config_load(proxy_endpoint, proxy_configuration) async def get_service_activity( - self, dynamic_sidecar_endpoint: AnyHttpUrl + self, dynamic_sidecar_endpoint: str ) -> 
ActivityInfoOrNone: response = await self._thin_client.get_containers_activity( dynamic_sidecar_endpoint @@ -499,9 +495,7 @@ async def get_service_activity( ActivityInfo.model_validate(decoded_response) if decoded_response else None ) - async def free_reserved_disk_space( - self, dynamic_sidecar_endpoint: AnyHttpUrl - ) -> None: + async def free_reserved_disk_space(self, dynamic_sidecar_endpoint: str) -> None: await self._thin_client.post_disk_reserved_free(dynamic_sidecar_endpoint) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py index 21ef1bbe279..5cc771b7316 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py @@ -5,7 +5,6 @@ from httpx import Response, Timeout from models_library.services_creation import CreateServiceMetricsAdditionalParams from models_library.sidecar_volumes import VolumeCategory, VolumeStatus -from pydantic import AnyHttpUrl from servicelib.docker_constants import SUFFIX_EGRESS_PROXY_NAME from servicelib.fastapi.http_client_thin import ( BaseThinClient, @@ -57,7 +56,7 @@ def __init__(self, app: FastAPI): def _get_url( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, postfix: str, *, no_api_version: bool = False, @@ -66,27 +65,23 @@ def _get_url( api_version = "" if no_api_version else f"{self.API_VERSION}/" return f"{dynamic_sidecar_endpoint}{api_version}{postfix}" - async def _get_health_common( - self, dynamic_sidecar_endpoint: AnyHttpUrl - ) -> Response: + async def _get_health_common(self, dynamic_sidecar_endpoint: str) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "health", no_api_version=True) return await self.client.get(url, timeout=self._health_request_timeout) 
@retry_on_errors() @expect_status(status.HTTP_200_OK) - async def get_health(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> Response: + async def get_health(self, dynamic_sidecar_endpoint: str) -> Response: return await self._get_health_common(dynamic_sidecar_endpoint) @expect_status(status.HTTP_200_OK) - async def get_health_no_retry( - self, dynamic_sidecar_endpoint: AnyHttpUrl - ) -> Response: + async def get_health_no_retry(self, dynamic_sidecar_endpoint: str) -> Response: return await self._get_health_common(dynamic_sidecar_endpoint) @retry_on_errors() @expect_status(status.HTTP_200_OK) async def get_containers( - self, dynamic_sidecar_endpoint: AnyHttpUrl, *, only_status: bool + self, dynamic_sidecar_endpoint: str, *, only_status: bool ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers") return await self.client.get(url, params={"only_status": only_status}) @@ -95,7 +90,7 @@ async def get_containers( @expect_status(status.HTTP_204_NO_CONTENT) async def patch_containers_ports_io( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, *, enable_outputs: bool, enable_inputs: bool, @@ -108,7 +103,7 @@ async def patch_containers_ports_io( @retry_on_errors() @expect_status(status.HTTP_204_NO_CONTENT) async def post_containers_ports_outputs_dirs( - self, dynamic_sidecar_endpoint: AnyHttpUrl, *, outputs_labels: dict[str, Any] + self, dynamic_sidecar_endpoint: str, *, outputs_labels: dict[str, Any] ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs/dirs") return await self.client.post(url, json={"outputs_labels": outputs_labels}) @@ -116,7 +111,7 @@ async def post_containers_ports_outputs_dirs( @retry_on_errors() @expect_status(status.HTTP_200_OK) async def get_containers_name( - self, dynamic_sidecar_endpoint: AnyHttpUrl, *, dynamic_sidecar_network_name: str + self, dynamic_sidecar_endpoint: str, *, dynamic_sidecar_network_name: str ) -> Response: filters = json.dumps( { @@ -133,7 
+128,7 @@ async def get_containers_name( @expect_status(status.HTTP_204_NO_CONTENT) async def post_containers_networks_attach( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, *, container_id: str, network_id: str, @@ -152,7 +147,7 @@ async def post_containers_networks_attach( @expect_status(status.HTTP_204_NO_CONTENT) async def post_containers_networks_detach( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, *, container_id: str, network_id: str, @@ -170,7 +165,7 @@ async def post_containers_networks_detach( @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_compose_spec( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, *, compose_spec: str, ) -> Response: @@ -181,7 +176,7 @@ async def post_containers_compose_spec( @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, *, metrics_params: CreateServiceMetricsAdditionalParams, ) -> Response: @@ -193,7 +188,7 @@ async def post_containers_tasks( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_down( - self, dynamic_sidecar_endpoint: AnyHttpUrl + self, dynamic_sidecar_endpoint: str ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers:down") return await self.client.post(url) @@ -201,7 +196,7 @@ async def post_containers_tasks_down( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_state_restore( - self, dynamic_sidecar_endpoint: AnyHttpUrl + self, dynamic_sidecar_endpoint: str ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/state:restore") return await self.client.post(url) @@ -209,7 +204,7 @@ async def post_containers_tasks_state_restore( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_state_save( - self, dynamic_sidecar_endpoint: AnyHttpUrl + 
self, dynamic_sidecar_endpoint: str ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/state:save") return await self.client.post(url) @@ -217,7 +212,7 @@ async def post_containers_tasks_state_save( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_images_pull( - self, dynamic_sidecar_endpoint: AnyHttpUrl + self, dynamic_sidecar_endpoint: str ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/images:pull") return await self.client.post(url) @@ -226,7 +221,7 @@ async def post_containers_images_pull( @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_ports_inputs_pull( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, port_keys: list[str] | None = None, ) -> Response: port_keys = [] if port_keys is None else port_keys @@ -237,7 +232,7 @@ async def post_containers_tasks_ports_inputs_pull( @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_ports_outputs_pull( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, port_keys: list[str] | None = None, ) -> Response: port_keys = [] if port_keys is None else port_keys @@ -247,7 +242,7 @@ async def post_containers_tasks_ports_outputs_pull( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_ports_outputs_push( - self, dynamic_sidecar_endpoint: AnyHttpUrl + self, dynamic_sidecar_endpoint: str ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs:push") return await self.client.post(url) @@ -255,7 +250,7 @@ async def post_containers_tasks_ports_outputs_push( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_restart( - self, dynamic_sidecar_endpoint: AnyHttpUrl + self, dynamic_sidecar_endpoint: str ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers:restart") return await self.client.post(url) @@ -264,7 +259,7 @@ async 
def post_containers_tasks_restart( @expect_status(status.HTTP_204_NO_CONTENT) async def put_volumes( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, volume_category: VolumeCategory, volume_status: VolumeStatus, ) -> Response: @@ -275,7 +270,7 @@ async def put_volumes( @retry_on_errors() @expect_status(status.HTTP_200_OK) async def proxy_config_load( - self, proxy_endpoint: AnyHttpUrl, proxy_configuration: dict[str, Any] + self, proxy_endpoint: str, proxy_configuration: dict[str, Any] ) -> Response: url = self._get_url(proxy_endpoint, "load", no_api_version=True) return await self.client.post(url, json=proxy_configuration) @@ -284,7 +279,7 @@ async def proxy_config_load( @expect_status(status.HTTP_200_OK) async def get_containers_activity( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/activity") return await self.client.get(url) @@ -293,7 +288,7 @@ async def get_containers_activity( @expect_status(status.HTTP_204_NO_CONTENT) async def post_disk_reserved_free( self, - dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_endpoint: str, ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "disk/reserved:free") return await self.client.post(url) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py index b68467a572d..04853661c47 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py @@ -40,7 +40,7 @@ from models_library.services_types import ServicePortKey from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import AnyHttpUrl, 
NonNegativeFloat +from pydantic import NonNegativeFloat from servicelib.background_task import ( cancel_task, start_periodic_task, @@ -455,7 +455,7 @@ async def retrieve_service_inputs( service_name = self._inverse_search_mapping[node_uuid] scheduler_data: SchedulerData = self._to_observe[service_name] - dynamic_sidecar_endpoint: AnyHttpUrl = scheduler_data.endpoint + dynamic_sidecar_endpoint = scheduler_data.endpoint sidecars_client: SidecarsClient = await get_sidecars_client(self.app, node_uuid) started = time.time() From a55950b92c3909c6ef6a47cefa54489d6d7df5b7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 15:15:03 +0100 Subject: [PATCH 063/121] fix datetime.utc --- .../unit/with_dbs/test_utils_rabbitmq.py | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py index 0835852f899..a041f70ecc7 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py @@ -177,7 +177,7 @@ async def test_publish_service_resource_tracking_started( RabbitResourceTrackingBaseMessage.get_channel_name(), mocked_message_parser ) random_service_run_id = faker.pystr() - before_publication_time = datetime.datetime.now(datetime.timezone.utc) + before_publication_time = datetime.datetime.now(datetime.UTC) await publish_service_resource_tracking_started( publisher, service_run_id=random_service_run_id, @@ -205,9 +205,11 @@ async def test_publish_service_resource_tracking_started( service_resources={}, service_additional_metadata=faker.pydict(), ) - after_publication_time = datetime.datetime.now(datetime.timezone.utc) + after_publication_time = datetime.datetime.now(datetime.UTC) received_messages = await _assert_message_received( - mocked_message_parser, 1, RabbitResourceTrackingStartedMessage.model_validate_json + 
mocked_message_parser, + 1, + RabbitResourceTrackingStartedMessage.model_validate_json, ) assert isinstance(received_messages[0], RabbitResourceTrackingStartedMessage) assert received_messages[0].service_run_id == random_service_run_id @@ -231,7 +233,7 @@ async def test_publish_service_resource_tracking_stopped( RabbitResourceTrackingBaseMessage.get_channel_name(), mocked_message_parser ) random_service_run_id = faker.pystr() - before_publication_time = datetime.datetime.now(datetime.timezone.utc) + before_publication_time = datetime.datetime.now(datetime.UTC) await publish_service_resource_tracking_stopped( publisher, service_run_id=random_service_run_id, @@ -239,9 +241,11 @@ async def test_publish_service_resource_tracking_stopped( list(SimcorePlatformStatus) ), ) - after_publication_time = datetime.datetime.now(datetime.timezone.utc) + after_publication_time = datetime.datetime.now(datetime.UTC) received_messages = await _assert_message_received( - mocked_message_parser, 1, RabbitResourceTrackingStoppedMessage.model_validate_json + mocked_message_parser, + 1, + RabbitResourceTrackingStoppedMessage.model_validate_json, ) assert isinstance(received_messages[0], RabbitResourceTrackingStoppedMessage) assert received_messages[0].service_run_id == random_service_run_id @@ -265,14 +269,16 @@ async def test_publish_service_resource_tracking_heartbeat( RabbitResourceTrackingBaseMessage.get_channel_name(), mocked_message_parser ) random_service_run_id = faker.pystr() - before_publication_time = datetime.datetime.now(datetime.timezone.utc) + before_publication_time = datetime.datetime.now(datetime.UTC) await publish_service_resource_tracking_heartbeat( publisher, service_run_id=random_service_run_id, ) - after_publication_time = datetime.datetime.now(datetime.timezone.utc) + after_publication_time = datetime.datetime.now(datetime.UTC) received_messages = await _assert_message_received( - mocked_message_parser, 1, RabbitResourceTrackingHeartbeatMessage.model_validate_json + 
mocked_message_parser, + 1, + RabbitResourceTrackingHeartbeatMessage.model_validate_json, ) assert isinstance(received_messages[0], RabbitResourceTrackingHeartbeatMessage) assert received_messages[0].service_run_id == random_service_run_id From c14aaff6dacc2ea1575af125c11f6c918d6308c3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 15:29:55 +0100 Subject: [PATCH 064/121] remove deprecated --- .../db/repositories/comp_tasks/_utils.py | 18 +++++--- .../docker_service_specs/settings.py | 2 +- .../scheduler/_core/_events_utils.py | 6 +-- .../utils/comp_scheduler.py | 2 +- .../simcore_service_director_v2/utils/dask.py | 8 ++-- .../tests/helpers/shared_comp_utils.py | 4 +- .../integration/01/test_computation_api.py | 2 +- ...t_dynamic_sidecar_nodeports_integration.py | 6 ++- services/director-v2/tests/unit/conftest.py | 8 ++-- .../unit/test_models_dynamic_services.py | 2 +- .../unit/test_utils_client_decorators.py | 6 +-- .../unit/with_dbs/test_api_route_clusters.py | 43 +++++++++++++------ ...test_modules_dynamic_sidecar_docker_api.py | 2 +- ...es_dynamic_sidecar_docker_service_specs.py | 39 +++++++++-------- 14 files changed, 88 insertions(+), 60 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index 41b1b92c9e8..ef63ff4b823 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -174,7 +174,9 @@ async def _generate_task_image( } project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) project_node = await project_nodes_repo.get(connection, node_id=node_id) - node_resources = TypeAdapter(ServiceResourcesDict).validate_python(project_node.required_resources) + node_resources = 
TypeAdapter(ServiceResourcesDict).validate_python( + project_node.required_resources + ) if not node_resources: node_resources = await catalog_client.get_service_resources( user_id, node.key, node.version @@ -287,7 +289,9 @@ def _by_type_name(ec2: EC2InstanceTypeGet) -> bool: # less memory than the machine theoretical amount project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) node = await project_nodes_repo.get(connection, node_id=node_id) - node_resources = TypeAdapter(ServiceResourcesDict).validate_python(node.required_resources) + node_resources = TypeAdapter(ServiceResourcesDict).validate_python( + node.required_resources + ) if DEFAULT_SINGLE_SERVICE_NAME in node_resources: image_resources: ImageResources = node_resources[ DEFAULT_SINGLE_SERVICE_NAME @@ -322,7 +326,7 @@ def _by_type_name(ec2: EC2InstanceTypeGet) -> bool: except ( RemoteMethodNotRegisteredError, RPCServerError, - asyncio.TimeoutError, + TimeoutError, ) as exc: raise ClustersKeeperNotAvailableError from exc @@ -343,7 +347,7 @@ async def generate_tasks_list_from_project( list_comp_tasks = [] unique_service_key_versions: set[ServiceKeyVersion] = { - ServiceKeyVersion.construct( + ServiceKeyVersion.model_construct( key=node.key, version=node.version ) # the service key version is frozen for node in project.workbench.values() @@ -362,7 +366,7 @@ async def generate_tasks_list_from_project( for internal_id, node_id in enumerate(project.workbench, 1): node: Node = project.workbench[node_id] - node_key_version = ServiceKeyVersion.construct( + node_key_version = ServiceKeyVersion.model_construct( key=node.key, version=node.version ) node_details, node_extras, node_labels = key_version_to_node_infos.get( @@ -431,7 +435,7 @@ async def generate_tasks_list_from_project( project_id=project.uuid, node_id=NodeID(node_id), schema=NodeSchema.model_validate( - node_details.dict( + node_details.model_dump( exclude_unset=True, by_alias=True, include={"inputs", "outputs"} ) ), @@ -446,7 +450,7 @@ async def 
generate_tasks_list_from_project( last_heartbeat=None, created=arrow.utcnow().datetime, modified=arrow.utcnow().datetime, - pricing_info=pricing_info.dict(exclude={"pricing_unit_cost"}) + pricing_info=pricing_info.model_dump(exclude={"pricing_unit_cost"}) if pricing_info else None, hardware_info=hardware_info, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py index 7ffd87a4561..78a1201a714 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py @@ -399,7 +399,7 @@ def _format_env_var(env_var: str, destination_container: list[str]) -> str: def _get_boot_options( service_labels: SimcoreServiceLabels, ) -> dict[EnvVarKey, BootOption] | None: - as_dict = service_labels.dict() + as_dict = service_labels.model_dump() boot_options_encoded = as_dict.get("io.simcore.boot-options", None) if boot_options_encoded is None: return None diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py index 3653147eddb..e861ad9f30c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py @@ -535,7 +535,7 @@ async def _restore_service_state_with_metrics() -> None: ) ) service_outputs_labels = json.loads( - simcore_service_labels.dict().get("io.simcore.outputs", "{}") + simcore_service_labels.model_dump().get("io.simcore.outputs", "{}") ).get("outputs", {}) _logger.debug( 
"Creating dirs from service outputs labels: %s", @@ -563,7 +563,7 @@ async def get_allow_metrics_collection( bool, AllowMetricsCollectionFrontendUserPreference.get_default_value() ) - allow_metrics_collection = AllowMetricsCollectionFrontendUserPreference.model_validate( - preference + allow_metrics_collection = ( + AllowMetricsCollectionFrontendUserPreference.model_validate(preference) ) return allow_metrics_collection.value diff --git a/services/director-v2/src/simcore_service_director_v2/utils/comp_scheduler.py b/services/director-v2/src/simcore_service_director_v2/utils/comp_scheduler.py index e2310c4914a..15f3481da10 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/comp_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/comp_scheduler.py @@ -66,7 +66,7 @@ def create_service_resources_from_task(task: CompTaskAtDB) -> ServiceResourcesDi DockerGenericTag(f"{task.image.name}:{task.image.tag}"), { res_name: ResourceValue(limit=res_value, reservation=res_value) - for res_name, res_value in task.image.node_requirements.dict( + for res_name, res_value in task.image.node_requirements.model_dump( by_alias=True ).items() if res_value is not None diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index e8f010ec88e..cb47232cc9b 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -469,7 +469,7 @@ def from_node_reqs_to_dask_resources( node_reqs: NodeRequirements, ) -> dict[str, int | float]: """Dask resources are set such as {"CPU": X.X, "GPU": Y.Y, "RAM": INT}""" - dask_resources: dict[str, int | float] = node_reqs.dict( + dask_resources: dict[str, int | float] = node_reqs.model_dump( exclude_unset=True, by_alias=True, exclude_none=True, @@ -551,9 +551,9 @@ def _to_human_readable_resource_values(resources: dict[str, Any]) -> 
dict[str, A for res_name, res_value in resources.items(): if "RAM" in res_name: try: - human_readable_resources[res_name] = TypeAdapter(ByteSize).validate_python( - res_value - ).human_readable() + human_readable_resources[res_name] = ( + TypeAdapter(ByteSize).validate_python(res_value).human_readable() + ) except ValidationError: _logger.warning( "could not parse %s:%s, please check what changed in how Dask prepares resources!", diff --git a/services/director-v2/tests/helpers/shared_comp_utils.py b/services/director-v2/tests/helpers/shared_comp_utils.py index 5670ffc5de8..3352be68b33 100644 --- a/services/director-v2/tests/helpers/shared_comp_utils.py +++ b/services/director-v2/tests/helpers/shared_comp_utils.py @@ -43,8 +43,8 @@ async def assert_computation_task_out_obj( assert task_out.iteration == iteration assert task_out.cluster_id == cluster_id # check pipeline details contents - received_task_out_pipeline = task_out.pipeline_details.dict() - expected_task_out_pipeline = exp_pipeline_details.dict() + received_task_out_pipeline = task_out.pipeline_details.model_dump() + expected_task_out_pipeline = exp_pipeline_details.model_dump() assert received_task_out_pipeline == expected_task_out_pipeline diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index 23b9cf36cc1..47f130a98be 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ -67,7 +67,7 @@ def mock_env( "COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED": "1", "COMPUTATIONAL_BACKEND_ENABLED": "1", "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL": dask_scheduler_service, - "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": dask_scheduler_auth.json(), + "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": dask_scheduler_auth.model_dump_json(), "DYNAMIC_SIDECAR_IMAGE": dynamic_sidecar_docker_image_name, "SIMCORE_SERVICES_NETWORK_NAME": 
"test_swarm_network_name", "SWARM_STACK_NAME": "test_mocked_stack_name", diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index c418a4589db..bb900967061 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -463,7 +463,7 @@ async def projects_networks_db( engine: Engine = initialized_app.state.engine async with engine.acquire() as conn: - row_data = projects_networks_to_insert.dict() + row_data = projects_networks_to_insert.model_dump() insert_stmt = pg_insert(projects_networks).values(**row_data) upsert_snapshot = insert_stmt.on_conflict_do_update( constraint=projects_networks.primary_key, set_=row_data @@ -841,7 +841,9 @@ async def _debug_progress_callback( Client( app=initialized_app, async_client=director_v2_client, - base_url=TypeAdapter(AnyHttpUrl).validate_python(f"{director_v2_client.base_url}"), + base_url=TypeAdapter(AnyHttpUrl).validate_python( + f"{director_v2_client.base_url}" + ), ), task_id, task_timeout=60, diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index 76ecd742510..c45fdce372c 100644 --- a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -10,7 +10,6 @@ from unittest import mock import aiodocker -from pydantic import TypeAdapter import pytest import respx from faker import Faker @@ -28,6 +27,7 @@ from models_library.service_settings_labels import SimcoreServiceLabels from models_library.services import RunID, ServiceKey, ServiceKeyVersion, ServiceVersion from models_library.services_enums import ServiceState +from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from 
settings_library.s3 import S3Settings @@ -200,7 +200,7 @@ def mocked_storage_service_api( respx_mock.post( "/simcore-s3:access", name="get_or_create_temporary_s3_access", - ).respond(json={"data": fake_s3_settings.dict(by_alias=True)}) + ).respond(json={"data": fake_s3_settings.model_dump(by_alias=True)}) yield respx_mock @@ -211,7 +211,9 @@ def mocked_storage_service_api( @pytest.fixture def mock_service_key_version() -> ServiceKeyVersion: return ServiceKeyVersion( - key=TypeAdapter(ServiceKey).validate_python("simcore/services/dynamic/myservice"), + key=TypeAdapter(ServiceKey).validate_python( + "simcore/services/dynamic/myservice" + ), version=TypeAdapter(ServiceVersion).validate_python("1.4.5"), ) diff --git a/services/director-v2/tests/unit/test_models_dynamic_services.py b/services/director-v2/tests/unit/test_models_dynamic_services.py index e28cd51ca7f..99a22ece3bb 100644 --- a/services/director-v2/tests/unit/test_models_dynamic_services.py +++ b/services/director-v2/tests/unit/test_models_dynamic_services.py @@ -138,7 +138,7 @@ def test_running_service_details_make_status( print(running_service_details) assert running_service_details - running_service_details_dict = running_service_details.dict( + running_service_details_dict = running_service_details.model_dump( exclude_unset=True, by_alias=True ) diff --git a/services/director-v2/tests/unit/test_utils_client_decorators.py b/services/director-v2/tests/unit/test_utils_client_decorators.py index 066bedad11b..5b630f788c7 100644 --- a/services/director-v2/tests/unit/test_utils_client_decorators.py +++ b/services/director-v2/tests/unit/test_utils_client_decorators.py @@ -35,10 +35,10 @@ async def a_request(method: str, **kwargs) -> Response: await a_request( "POST", url=url, - params=dict(kettle="boiling"), - data=dict(kettle_number="royal_01"), + params={"kettle": "boiling"}, + data={"kettle_number": "royal_01"}, ) - assert status.HTTP_503_SERVICE_UNAVAILABLE == exec_info.value.status_code + assert 
exec_info.value.status_code == status.HTTP_503_SERVICE_UNAVAILABLE # ERROR test_utils_client_decorators:client_decorators.py:76 AService service error: # |Request| diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py index 149bd282b78..c3f3ab63965 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py @@ -94,7 +94,9 @@ async def test_list_clusters( # there is no cluster at the moment, the list shall contain the default cluster response = await async_client.get(list_clusters_url) assert response.status_code == status.HTTP_200_OK - returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python(response.json()) + returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python( + response.json() + ) assert ( len(returned_clusters_list) == 1 ), f"no default cluster in {returned_clusters_list=}" @@ -109,7 +111,9 @@ async def test_list_clusters( response = await async_client.get(list_clusters_url) assert response.status_code == status.HTTP_200_OK - returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python(response.json()) + returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python( + response.json() + ) assert ( len(returned_clusters_list) == NUM_CLUSTERS + 1 ) # the default cluster comes on top of the NUM_CLUSTERS @@ -121,7 +125,9 @@ async def test_list_clusters( user_2 = registered_user() response = await async_client.get(f"/v2/clusters?user_id={user_2['id']}") assert response.status_code == status.HTTP_200_OK - returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python(response.json()) + returned_clusters_list = TypeAdapter(list[ClusterGet]).validate_python( + response.json() + ) assert ( len(returned_clusters_list) == 1 ), f"no default cluster in {returned_clusters_list=}" @@ -189,9 +195,9 @@ async def test_get_cluster( 
assert response.status_code == status.HTTP_200_OK, f"received {response.text}" returned_cluster = ClusterGet.model_validate(response.json()) assert returned_cluster - assert the_cluster.dict(exclude={"authentication"}) == returned_cluster.dict( + assert the_cluster.model_dump( exclude={"authentication"} - ) + ) == returned_cluster.model_dump(exclude={"authentication"}) user_2 = registered_user() # getting the same cluster for user 2 shall return 403 @@ -322,9 +328,9 @@ async def test_create_cluster( created_cluster = ClusterGet.model_validate(response.json()) assert created_cluster - assert cluster_data.dict( + assert cluster_data.model_dump( exclude={"id", "owner", "access_rights", "authentication"} - ) == created_cluster.dict( + ) == created_cluster.model_dump( exclude={"id", "owner", "access_rights", "authentication"} ) @@ -413,7 +419,9 @@ async def test_update_own_cluster( ) assert returned_cluster.model_dump( exclude={"authentication": {"password"}} - ) == expected_modified_cluster.model_dump(exclude={"authentication": {"password"}}) + ) == expected_modified_cluster.model_dump( + exclude={"authentication": {"password"}} + ) # we can change the access rights, the owner rights are always kept user_2 = registered_user() @@ -435,7 +443,9 @@ async def test_update_own_cluster( expected_modified_cluster.access_rights[user_2["primary_gid"]] = rights assert returned_cluster.model_dump( exclude={"authentication": {"password"}} - ) == expected_modified_cluster.model_dump(exclude={"authentication": {"password"}}) + ) == expected_modified_cluster.model_dump( + exclude={"authentication": {"password"}} + ) # we can change the owner since we are admin cluster_patch = ClusterPatch(owner=user_2["primary_gid"]) response = await async_client.patch( @@ -729,7 +739,8 @@ async def test_ping_invalid_cluster_raises_422( # calling with correct data but non existing cluster also raises some_fake_cluster = ClusterPing( endpoint=faker.url(), - 
authentication=TypeAdapter(ClusterAuthentication).validate_python(cluster_simple_authentication() + authentication=TypeAdapter(ClusterAuthentication).validate_python( + cluster_simple_authentication() ), ) response = await async_client.post( @@ -750,10 +761,14 @@ async def test_ping_cluster( local_dask_gateway_server: DaskGatewayServer, ): valid_cluster = ClusterPing( - endpoint=TypeAdapter(AnyHttpUrl).validate_python(local_dask_gateway_server.address), + endpoint=TypeAdapter(AnyHttpUrl).validate_python( + local_dask_gateway_server.address + ), authentication=SimpleAuthentication( username="pytest_user", - password=TypeAdapter(SecretStr).validate_python(local_dask_gateway_server.password), + password=TypeAdapter(SecretStr).validate_python( + local_dask_gateway_server.password + ), ), ) response = await async_client.post( @@ -791,7 +806,9 @@ async def test_ping_specific_cluster( endpoint=local_dask_gateway_server.address, authentication=SimpleAuthentication( username="pytest_user", - password=TypeAdapter(SecretStr).validate_python(local_dask_gateway_server.password), + password=TypeAdapter(SecretStr).validate_python( + local_dask_gateway_server.password + ), ), ) for n in range(111) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 02facb9df44..f36a8f8f7f6 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -393,7 +393,7 @@ def test_settings__valid_network_names( monkeypatch: pytest.MonkeyPatch, dynamic_services_scheduler_settings: DynamicServicesSchedulerSettings, ) -> None: - items = dynamic_services_scheduler_settings.dict() + items = dynamic_services_scheduler_settings.model_dump() items["SIMCORE_SERVICES_NETWORK_NAME"] = simcore_services_network_name # validate network names diff --git 
a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index 907ba2776bd..47d4508c1c3 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -4,7 +4,6 @@ import json -from collections.abc import Mapping from typing import Any, cast from unittest.mock import Mock @@ -471,7 +470,7 @@ async def test_get_dynamic_proxy_spec( rpc_client=Mock(), ) - exclude_keys: Mapping[int | str, Any] = { + exclude_keys = { "Labels": True, "TaskTemplate": {"ContainerSpec": {"Env": True}}, } @@ -512,29 +511,31 @@ async def test_get_dynamic_proxy_spec( ) ) - assert dynamic_sidecar_spec.dict( - exclude=exclude_keys - ) == expected_dynamic_sidecar_spec_model.dict(exclude=exclude_keys) - assert dynamic_sidecar_spec.Labels - assert expected_dynamic_sidecar_spec_model.Labels - assert sorted(dynamic_sidecar_spec.Labels.keys()) == sorted( - expected_dynamic_sidecar_spec_model.Labels.keys() + assert dynamic_sidecar_spec.model_dump( + exclude=exclude_keys # type: ignore[arg-type] + ) == expected_dynamic_sidecar_spec_model.model_dump( + exclude=exclude_keys # type: ignore[arg-type] + ) + assert dynamic_sidecar_spec.labels + assert expected_dynamic_sidecar_spec_model.labels + assert sorted(dynamic_sidecar_spec.labels.keys()) == sorted( + expected_dynamic_sidecar_spec_model.labels.keys() ) assert ( - dynamic_sidecar_spec.Labels["io.simcore.scheduler-data"] - == expected_dynamic_sidecar_spec_model.Labels["io.simcore.scheduler-data"] + dynamic_sidecar_spec.labels["io.simcore.scheduler-data"] + == expected_dynamic_sidecar_spec_model.labels["io.simcore.scheduler-data"] ) - assert dynamic_sidecar_spec.Labels == expected_dynamic_sidecar_spec_model.Labels + assert dynamic_sidecar_spec.labels == 
expected_dynamic_sidecar_spec_model.labels dynamic_sidecar_spec_accumulated = dynamic_sidecar_spec # check reference after multiple runs assert dynamic_sidecar_spec_accumulated is not None assert ( - dynamic_sidecar_spec_accumulated.dict() - == expected_dynamic_sidecar_spec_model.dict() + dynamic_sidecar_spec_accumulated.model_dump() + == expected_dynamic_sidecar_spec_model.model_dump() ) @@ -569,10 +570,10 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( rpc_client=Mock(), ) assert dynamic_sidecar_spec - dynamic_sidecar_spec_dict = dynamic_sidecar_spec.dict() + dynamic_sidecar_spec_dict = dynamic_sidecar_spec.model_dump() expected_dynamic_sidecar_spec_dict = AioDockerServiceSpec.model_validate( expected_dynamic_sidecar_spec - ).dict() + ).model_dump() # ensure some entries are sorted the same to prevent flakyness for sorted_dict in [dynamic_sidecar_spec_dict, expected_dynamic_sidecar_spec_dict]: for key in ["DY_SIDECAR_STATE_EXCLUDE", "DY_SIDECAR_STATE_PATHS"]: @@ -604,8 +605,10 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( ) assert user_aiodocker_service_spec - orig_dict = dynamic_sidecar_spec.dict(by_alias=True, exclude_unset=True) - user_dict = user_aiodocker_service_spec.dict(by_alias=True, exclude_unset=True) + orig_dict = dynamic_sidecar_spec.model_dump(by_alias=True, exclude_unset=True) + user_dict = user_aiodocker_service_spec.model_dump( + by_alias=True, exclude_unset=True + ) another_merged_dict = nested_update( orig_dict, From b6194a11e728e74e3560bd4753ff09d113275442 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 15:40:52 +0100 Subject: [PATCH 065/121] fix validator --- .../api_schemas_directorv2/comp_tasks.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py b/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py index 
e383d45f20e..821b5f8838e 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/comp_tasks.py @@ -1,7 +1,14 @@ from typing import Any, TypeAlias from models_library.basic_types import IDStr -from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, field_validator +from pydantic import ( + AnyHttpUrl, + AnyUrl, + BaseModel, + Field, + ValidationInfo, + field_validator, +) from ..clusters import ClusterID from ..projects import ProjectID @@ -52,16 +59,18 @@ class ComputationCreate(BaseModel): @field_validator("product_name") @classmethod - def ensure_product_name_defined_if_computation_starts(cls, v, values): - if "start_pipeline" in values and values["start_pipeline"] and v is None: + def _ensure_product_name_defined_if_computation_starts( + cls, v, info: ValidationInfo + ): + if info.data.get("start_pipeline") and v is None: msg = "product_name must be set if computation shall start!" 
raise ValueError(msg) return v @field_validator("use_on_demand_clusters") @classmethod - def ensure_expected_options(cls, v, values): - if v is True and ("cluster_id" in values and values["cluster_id"] is not None): + def _ensure_expected_options(cls, v, info: ValidationInfo): + if v and info.data.get("cluster_id") is not None: msg = "cluster_id cannot be set if use_on_demand_clusters is set" raise ValueError(msg) return v From 93226cd8b7deab6a60f5ed77b0f5c23c0f805693 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 15:47:26 +0100 Subject: [PATCH 066/121] fix url --- .../src/simcore_service_director_v2/utils/dask_client_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask_client_utils.py b/services/director-v2/src/simcore_service_director_v2/utils/dask_client_utils.py index 2deb203780b..15e6e98dfce 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask_client_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask_client_utils.py @@ -216,7 +216,7 @@ async def test_scheduler_endpoint( try: if _is_dask_scheduler(authentication): async with distributed.Client( - address=endpoint, timeout=f"{_PING_TIMEOUT_S}", asynchronous=True + address=f"{endpoint}", timeout=f"{_PING_TIMEOUT_S}", asynchronous=True ) as dask_client: if dask_client.status != _DASK_SCHEDULER_RUNNING_STATE: msg = "internal scheduler is not running!" 
From 67da4f894aafef93e2480f18078ffd69a50d5dbf Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 16:02:05 +0100 Subject: [PATCH 067/121] fix model_dumps --- .../unit/with_dbs/test_api_route_clusters.py | 85 ++++++------------- 1 file changed, 26 insertions(+), 59 deletions(-) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py index c3f3ab63965..36c3c019d4e 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py @@ -2,7 +2,6 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -import json import random from collections.abc import Callable, Iterator from typing import Any @@ -11,6 +10,7 @@ import pytest import sqlalchemy as sa from _dask_helpers import DaskGatewayServer +from common_library.serialization import model_dump_with_secrets from distributed.deploy.spec import SpecCluster from faker import Faker from httpx import URL @@ -32,7 +32,6 @@ ) from pydantic import AnyHttpUrl, SecretStr, TypeAdapter from pytest_simcore.helpers.typing_env import EnvVarsDict -from settings_library.utils_encoders import create_json_encoder_wo_secrets from simcore_postgres_database.models.clusters import ClusterType, clusters from starlette import status @@ -313,15 +312,15 @@ async def test_create_cluster( authentication=cluster_simple_authentication(), name=faker.name(), type=random.choice(list(ClusterType)), + owner=faker.pyint(min_value=1), ) response = await async_client.post( create_cluster_url, - json=json.loads( - cluster_data.model_dump_json( - by_alias=True, - exclude_unset=True, - encoder=create_json_encoder_wo_secrets(ClusterCreate), - ) + json=model_dump_with_secrets( + cluster_data, + show_secrets=True, + by_alias=True, + exclude_unset=True, ), ) assert response.status_code == status.HTTP_201_CREATED, f"received: {response.text}" @@ 
-360,10 +359,8 @@ async def test_update_own_cluster( # try to modify one that does not exist response = await async_client.patch( f"/v2/clusters/15615165165165?user_id={user_1['id']}", - json=json.loads( - ClusterPatch().model_dump_json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) + json=model_dump_with_secrets( + ClusterPatch(), show_secrets=True, **_PATCH_EXPORT ), ) assert response.status_code == status.HTTP_404_NOT_FOUND @@ -382,10 +379,8 @@ async def test_update_own_cluster( # now we modify nothing response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", - json=json.loads( - ClusterPatch().model_dump_json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) + json=model_dump_with_secrets( + ClusterPatch(), show_secrets=True, **_PATCH_EXPORT ), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" @@ -402,10 +397,8 @@ async def test_update_own_cluster( ClusterPatch(endpoint=faker.uri()), ClusterPatch(authentication=cluster_simple_authentication()), ]: - jsonable_cluster_patch = json.loads( - cluster_patch.json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) + jsonable_cluster_patch = model_dump_with_secrets( + cluster_patch, show_secrets=True, **_PATCH_EXPORT ) print(f"--> patching cluster with {jsonable_cluster_patch}") response = await async_client.patch( @@ -450,11 +443,7 @@ async def test_update_own_cluster( cluster_patch = ClusterPatch(owner=user_2["primary_gid"]) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", - json=json.loads( - cluster_patch.model_dump_json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) - ), + json=model_dump_with_secrets(cluster_patch, show_secrets=True, **_PATCH_EXPORT), ) assert response.status_code == status.HTTP_200_OK, f"received {response.text}" returned_cluster = ClusterGet.model_validate(response.json()) 
@@ -472,11 +461,7 @@ async def test_update_own_cluster( ) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_1['id']}", - json=json.loads( - cluster_patch.model_dump_json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) - ), + json=model_dump_with_secrets(cluster_patch, show_secrets=True, **_PATCH_EXPORT), ) assert ( response.status_code == status.HTTP_403_FORBIDDEN @@ -496,10 +481,8 @@ async def test_update_default_cluster_fails( # try to modify one that does not exist response = await async_client.patch( f"/v2/clusters/default?user_id={user_1['id']}", - json=json.loads( - ClusterPatch().model_dump_json( - **_PATCH_EXPORT, encoder=create_json_encoder_wo_secrets(ClusterPatch) - ) + json=model_dump_with_secrets( + ClusterPatch(), show_secrets=True, **_PATCH_EXPORT ), ) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY @@ -568,11 +551,8 @@ async def test_update_another_cluster( ]: response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}", - json=json.loads( - cluster_patch.json( - **_PATCH_EXPORT, - encoder=create_json_encoder_wo_secrets(ClusterPatch), - ) + json=model_dump_with_secrets( + cluster_patch, show_secrets=True, **_PATCH_EXPORT ), ) assert ( @@ -591,11 +571,8 @@ async def test_update_another_cluster( cluster_patch = ClusterPatch(accessRights={user_3["primary_gid"]: rights}) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}", - json=json.loads( - cluster_patch.model_dump_json( - **_PATCH_EXPORT, - encoder=create_json_encoder_wo_secrets(ClusterPatch), - ) + json=model_dump_with_secrets( + cluster_patch, show_secrets=True, **_PATCH_EXPORT ), ) assert ( @@ -612,11 +589,8 @@ async def test_update_another_cluster( cluster_patch = ClusterPatch(accessRights={user_3["primary_gid"]: rights}) response = await async_client.patch( f"/v2/clusters/{the_cluster.id}?user_id={user_2['id']}", - json=json.loads( - 
cluster_patch.model_dump_json( - **_PATCH_EXPORT, - encoder=create_json_encoder_wo_secrets(ClusterPatch), - ) + json=model_dump_with_secrets( + cluster_patch, show_secrets=True, **_PATCH_EXPORT ), ) assert ( @@ -745,10 +719,8 @@ async def test_ping_invalid_cluster_raises_422( ) response = await async_client.post( "/v2/clusters:ping", - json=json.loads( - some_fake_cluster.model_dump_json( - by_alias=True, encoder=create_json_encoder_wo_secrets(ClusterPing) - ) + json=model_dump_with_secrets( + some_fake_cluster, show_secrets=True, by_alias=True ), ) with pytest.raises(httpx.HTTPStatusError): @@ -773,12 +745,7 @@ async def test_ping_cluster( ) response = await async_client.post( "/v2/clusters:ping", - json=json.loads( - valid_cluster.model_dump_json( - by_alias=True, - encoder=create_json_encoder_wo_secrets(SimpleAuthentication), - ) - ), + json=model_dump_with_secrets(valid_cluster, show_secrets=True, alias=True), ) response.raise_for_status() assert response.status_code == status.HTTP_204_NO_CONTENT From 4dfad543f33fd501159f4cfe87a29cbe07523445 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 16:17:42 +0100 Subject: [PATCH 068/121] fix deprecated --- .../scripts/create_node-meta-schema.py | 2 +- .../src/settings_library/postgres.py | 36 +++++++++---------- .../db/repositories/projects_networks.py | 2 +- .../utils/clients.py | 4 +-- .../tests/helpers/shared_comp_utils.py | 5 +-- ...t_dynamic_sidecar_nodeports_integration.py | 2 +- .../director-v2/tests/integration/02/utils.py | 6 ++-- services/director-v2/tests/unit/conftest.py | 2 +- ...dels_schemas_dynamic_services_scheduler.py | 8 +++-- .../tests/unit/test_modules_rabbitmq.py | 2 +- .../unit/test_utils_distributed_identifier.py | 6 ++-- .../tests/unit/with_dbs/conftest.py | 4 ++- 12 files changed, 42 insertions(+), 37 deletions(-) diff --git a/api/specs/director/schemas/scripts/create_node-meta-schema.py b/api/specs/director/schemas/scripts/create_node-meta-schema.py index 
29b4a02a9b2..61bb3e7c0f6 100644 --- a/api/specs/director/schemas/scripts/create_node-meta-schema.py +++ b/api/specs/director/schemas/scripts/create_node-meta-schema.py @@ -15,7 +15,7 @@ if __name__ == "__main__": with Path.open(CURRENT_DIR.parent / "node-meta-v0.0.1-pydantic.json", "w") as f: - schema = ServiceMetaDataPublished.schema_json() + schema = ServiceMetaDataPublished.model_json_schema() schema_without_ref = jsonref.loads(schema) json.dump(schema_without_ref, f, indent=2) diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index 883d14c3bb4..173a81f40a3 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -56,31 +56,27 @@ def _check_size(cls, v, info: ValidationInfo): @cached_property def dsn(self) -> str: - dsn: str = str( - PostgresDsn.build( # pylint: disable=no-member - scheme="postgresql", - username=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=self.POSTGRES_PORT, - path=f"{self.POSTGRES_DB}", - ) + dsn = PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, + port=self.POSTGRES_PORT, + path=f"{self.POSTGRES_DB}", ) - return dsn + return f"{dsn}" @cached_property def dsn_with_async_sqlalchemy(self) -> str: - dsn: str = str( - PostgresDsn.build( # pylint: disable=no-member - scheme="postgresql+asyncpg", - username=self.POSTGRES_USER, - password=self.POSTGRES_PASSWORD.get_secret_value(), - host=self.POSTGRES_HOST, - port=self.POSTGRES_PORT, - path=f"{self.POSTGRES_DB}", - ) + dsn = PostgresDsn.build( # pylint: disable=no-member + scheme="postgresql+asyncpg", + username=self.POSTGRES_USER, + password=self.POSTGRES_PASSWORD.get_secret_value(), + host=self.POSTGRES_HOST, + port=self.POSTGRES_PORT, + 
path=f"{self.POSTGRES_DB}", ) - return dsn + return f"{dsn}" @cached_property def dsn_with_query(self) -> str: diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py index c2233030622..59334aa0a06 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects_networks.py @@ -33,7 +33,7 @@ async def upsert_projects_networks( ) async with self.db_engine.acquire() as conn: - row_data = json.loads(projects_networks_to_insert.json()) + row_data = json.loads(projects_networks_to_insert.model_dump_json()) insert_stmt = pg_insert(projects_networks).values(**row_data) upsert_snapshot = insert_stmt.on_conflict_do_update( constraint=projects_networks.primary_key, set_=row_data diff --git a/services/director-v2/src/simcore_service_director_v2/utils/clients.py b/services/director-v2/src/simcore_service_director_v2/utils/clients.py index d01d38a1907..e12cf2d09f0 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/clients.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/clients.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Union +from typing import Any import httpx from fastapi import HTTPException @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) -def unenvelope_or_raise_error(resp: httpx.Response) -> Union[list[Any], dict[str, Any]]: +def unenvelope_or_raise_error(resp: httpx.Response) -> list[Any] | dict[str, Any]: """ Director responses are enveloped If successful response, we un-envelop it and return data as a dict diff --git a/services/director-v2/tests/helpers/shared_comp_utils.py b/services/director-v2/tests/helpers/shared_comp_utils.py index 3352be68b33..8ee507f4a2b 100644 --- 
a/services/director-v2/tests/helpers/shared_comp_utils.py +++ b/services/director-v2/tests/helpers/shared_comp_utils.py @@ -64,7 +64,7 @@ async def assert_and_wait_for_pipeline_status( MAX_TIMEOUT_S = 5 * MINUTE async def check_pipeline_state() -> ComputationGet: - response = await client.get(url, params={"user_id": user_id}) + response = await client.get(f"{url}", params={"user_id": user_id}) assert ( response.status_code == status.HTTP_200_OK ), f"response code is {response.status_code}, error: {response.text}" @@ -100,4 +100,5 @@ async def check_pipeline_state() -> ComputationGet: return task_out # this is only to satisfy pylance - raise AssertionError("No computation task generated!") + msg = "No computation task generated!" + raise AssertionError(msg) diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index bb900967061..a160afd664f 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -403,7 +403,7 @@ def mock_env( "COMPUTATIONAL_BACKEND_ENABLED": "true", "COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED": "true", "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL": dask_scheduler_service, - "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": dask_scheduler_auth.json(), + "COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": dask_scheduler_auth.model_dump_json(), "DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED": "1", }, ) diff --git a/services/director-v2/tests/integration/02/utils.py b/services/director-v2/tests/integration/02/utils.py index db99170d9ed..02f6358e426 100644 --- a/services/director-v2/tests/integration/02/utils.py +++ b/services/director-v2/tests/integration/02/utils.py @@ -263,7 +263,9 @@ async def patch_dynamic_service_url(app: FastAPI, node_uuid: str) -> str: proxy_service_name, 
target_port=dynamic_sidecar_proxy_settings.DYNAMIC_SIDECAR_CADDY_ADMIN_API_PORT, ) - assert proxy_published_port is not None, f"{sidecar_settings.json()=}" + assert ( + proxy_published_port is not None + ), f"{sidecar_settings.model_dump_json()=}" async with scheduler.scheduler._lock: # noqa: SLF001 localhost_ip = get_localhost_ip() @@ -458,7 +460,7 @@ async def assert_retrieve_service( size_bytes = json_result["data"]["size_bytes"] assert size_bytes > 0 - assert type(size_bytes) == int + assert isinstance(size_bytes, int) async def assert_stop_service( diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index c45fdce372c..2856dffb5fe 100644 --- a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -123,7 +123,7 @@ def scheduler_data_from_http_request( def mock_service_inspect( scheduler_data_from_http_request: ServiceDetails, ) -> Mapping[str, Any]: - service_details = json.loads(scheduler_data_from_http_request.json()) + service_details = json.loads(scheduler_data_from_http_request.model_dump_json()) service_details["compose_spec"] = json.dumps(service_details["compose_spec"]) return { "Spec": { diff --git a/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py b/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py index dd6c6adf2d8..0bbd9bca526 100644 --- a/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py +++ b/services/director-v2/tests/unit/test_models_schemas_dynamic_services_scheduler.py @@ -4,8 +4,8 @@ from copy import deepcopy from pathlib import Path -from pydantic import TypeAdapter import pytest +from pydantic import TypeAdapter from simcore_service_director_v2.models.dynamic_services_scheduler import SchedulerData @@ -20,7 +20,7 @@ def test_regression_as_label_data(scheduler_data: SchedulerData) -> None: # old tested implementation scheduler_data_copy = 
deepcopy(scheduler_data) scheduler_data_copy.compose_spec = json.dumps(scheduler_data_copy.compose_spec) - json_encoded = scheduler_data_copy.json() + json_encoded = scheduler_data_copy.model_dump_json() # using pydantic's internals label_data = scheduler_data.as_label_data() @@ -35,4 +35,6 @@ def test_ensure_legacy_format_compatibility(legacy_scheduler_data_format: Path): # PRs applying changes to the legacy format: # - https://github.com/ITISFoundation/osparc-simcore/pull/3610 - assert TypeAdapter(list[SchedulerData]).validate_json(legacy_scheduler_data_format.read_text()) + assert TypeAdapter(list[SchedulerData]).validate_json( + legacy_scheduler_data_format.read_text() + ) diff --git a/services/director-v2/tests/unit/test_modules_rabbitmq.py b/services/director-v2/tests/unit/test_modules_rabbitmq.py index 1d557d673a8..972f836f575 100644 --- a/services/director-v2/tests/unit/test_modules_rabbitmq.py +++ b/services/director-v2/tests/unit/test_modules_rabbitmq.py @@ -44,7 +44,7 @@ def message(faker: Faker) -> WalletCreditsLimitReachedMessage: async def test_handler_out_of_credits( mock_app: FastAPI, message: WalletCreditsLimitReachedMessage, ignore_limits ): - await handler_out_of_credits(mock_app, message.json().encode()) + await handler_out_of_credits(mock_app, message.model_dump_json().encode()) removal_mark_count = ( mock_app.state.dynamic_sidecar_scheduler.mark_all_services_in_wallet_for_removal.call_count diff --git a/services/director-v2/tests/unit/test_utils_distributed_identifier.py b/services/director-v2/tests/unit/test_utils_distributed_identifier.py index a9fd8a42a0a..518552af1e1 100644 --- a/services/director-v2/tests/unit/test_utils_distributed_identifier.py +++ b/services/director-v2/tests/unit/test_utils_distributed_identifier.py @@ -132,14 +132,16 @@ def _serialize_identifier(cls, identifier: UserDefinedID) -> str: return f"{identifier._id}" # noqa: SLF001 @classmethod - def _deserialize_cleanup_context(cls, raw: str | bytes) -> 
AnEmptyTextCleanupContext: + def _deserialize_cleanup_context( + cls, raw: str | bytes + ) -> AnEmptyTextCleanupContext: return AnEmptyTextCleanupContext.model_validate_json(raw) @classmethod def _serialize_cleanup_context( cls, cleanup_context: AnEmptyTextCleanupContext ) -> str: - return cleanup_context.json() + return cleanup_context.model_dump_json() async def is_used( self, identifier: UserDefinedID, cleanup_context: AnEmptyTextCleanupContext diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 7b32af07c8c..f1f05f4fe06 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -104,7 +104,9 @@ def creator( "outputs": ( { key: ( - json.loads(value.json(by_alias=True, exclude_unset=True)) + json.loads( + value.model_dump_json(by_alias=True, exclude_unset=True) + ) if isinstance(value, BaseModel) else value ) From 44557ebe5569558f177dcfc9335145759986d80d Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 16:26:41 +0100 Subject: [PATCH 069/121] change var --- .../settings-library/src/settings_library/postgres.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index 173a81f40a3..e65f02e6edc 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -56,7 +56,7 @@ def _check_size(cls, v, info: ValidationInfo): @cached_property def dsn(self) -> str: - dsn = PostgresDsn.build( # pylint: disable=no-member + url = PostgresDsn.build( # pylint: disable=no-member scheme="postgresql", username=self.POSTGRES_USER, password=self.POSTGRES_PASSWORD.get_secret_value(), @@ -64,11 +64,11 @@ def dsn(self) -> str: port=self.POSTGRES_PORT, path=f"{self.POSTGRES_DB}", ) - return f"{dsn}" + return 
f"{url}" @cached_property def dsn_with_async_sqlalchemy(self) -> str: - dsn = PostgresDsn.build( # pylint: disable=no-member + url = PostgresDsn.build( # pylint: disable=no-member scheme="postgresql+asyncpg", username=self.POSTGRES_USER, password=self.POSTGRES_PASSWORD.get_secret_value(), @@ -76,7 +76,7 @@ def dsn_with_async_sqlalchemy(self) -> str: port=self.POSTGRES_PORT, path=f"{self.POSTGRES_DB}", ) - return f"{dsn}" + return f"{url}" @cached_property def dsn_with_query(self) -> str: From c466996be0f7d2c4b520c4debd875e26d4c7a7aa Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 20:09:45 +0100 Subject: [PATCH 070/121] fix nodeid serialization --- .../models/comp_tasks.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 57e99695670..61b9e78878a 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -1,6 +1,6 @@ import datetime from contextlib import suppress -from typing import Any +from typing import Annotated, Any from dask_task_models_library.container_tasks.protocol import ContainerEnvsDict from models_library.api_schemas_directorv2.services import NodeRequirements @@ -19,6 +19,7 @@ ByteSize, ConfigDict, Field, + PlainSerializer, PositiveInt, TypeAdapter, ValidationInfo, @@ -74,7 +75,7 @@ def _migrate_from_requirements(cls, v, info: ValidationInfo): model_config = ConfigDict( from_attributes=True, json_schema_extra={ - "examples": [ # type: ignore + "examples": [ # type: ignore { "name": "simcore/services/dynamic/jupyter-octave-python-math", "tag": "1.3.1", @@ -115,7 +116,7 @@ class NodeSchema(BaseModel): class CompTaskAtDB(BaseModel): project_id: ProjectID - node_id: NodeID + node_id: Annotated[NodeID, PlainSerializer(str, return_type=str)] 
job_id: str | None = Field(default=None, description="The worker job ID") node_schema: NodeSchema = Field(..., alias="schema") inputs: InputsDict | None = Field(..., description="the inputs payload") @@ -177,7 +178,9 @@ def _backward_compatible_null_value(cls, v: HardwareInfo | None) -> HardwareInfo return v def to_db_model(self, **exclusion_rules) -> dict[str, Any]: - comp_task_dict = self.model_dump(by_alias=True, exclude_unset=True, **exclusion_rules) + comp_task_dict = self.model_dump( + by_alias=True, exclude_unset=True, **exclusion_rules + ) if "state" in comp_task_dict: comp_task_dict["state"] = RUNNING_STATE_TO_DB[comp_task_dict["state"]].value return comp_task_dict @@ -237,9 +240,9 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: "pricing_unit_id": 1, "pricing_unit_cost_id": 1, }, - "hardware_info": next(iter(HardwareInfo.model_config["json_schema_extra"]["examples"])), # type: ignore + "hardware_info": next(iter(HardwareInfo.model_config["json_schema_extra"]["examples"])), # type: ignore } - for image_example in Image.model_config["json_schema_extra"]["examples"] # type: ignore + for image_example in Image.model_config["json_schema_extra"]["examples"] # type: ignore ] }, ) From a0cf3178fa486c82c9007f3cd0d659adaae100bc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Fri, 8 Nov 2024 21:11:24 +0100 Subject: [PATCH 071/121] fix serialization --- .../src/common_library/serialization.py | 10 ++++++---- .../tests/test_serialization.py | 19 ++++++++++++++----- .../unit/test_modules_dask_clients_pool.py | 10 +++++----- 3 files changed, 25 insertions(+), 14 deletions(-) diff --git a/packages/common-library/src/common_library/serialization.py b/packages/common-library/src/common_library/serialization.py index b201f4115a8..b69e0f767ae 100644 --- a/packages/common-library/src/common_library/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -1,6 +1,7 @@ from datetime import timedelta -from typing import Any, 
get_origin +from typing import Any +from common_library.pydantic_fields_extension import get_type from pydantic import BaseModel, SecretStr from pydantic_core import Url @@ -20,14 +21,15 @@ def model_dump_with_secrets( data[field_name] = field_data.total_seconds() elif isinstance(field_data, SecretStr): - data[field_name] = field_data.get_secret_value() if show_secrets else str(field_data) - + data[field_name] = ( + field_data.get_secret_value() if show_secrets else str(field_data) + ) elif isinstance(field_data, Url): data[field_name] = str(field_data) elif isinstance(field_data, dict): - field_type = get_origin(settings_obj.model_fields[field_name].annotation) + field_type = get_type(settings_obj.model_fields[field_name]) if field_type and issubclass(field_type, BaseModel): data[field_name] = model_dump_with_secrets( field_type.model_validate(field_data), diff --git a/packages/common-library/tests/test_serialization.py b/packages/common-library/tests/test_serialization.py index d897ff5ec5d..d5dea70ec22 100644 --- a/packages/common-library/tests/test_serialization.py +++ b/packages/common-library/tests/test_serialization.py @@ -4,22 +4,31 @@ class Credentials(BaseModel): - USERNAME: str | None = None - PASSWORD: SecretStr | None = None + username: str + password: SecretStr + + +class Access(BaseModel): + credentials: Credentials @pytest.mark.parametrize( "expected,show_secrets", [ ( - {"USERNAME": "DeepThought", "PASSWORD": "42"}, + {"credentials": {"username": "DeepThought", "password": "42"}}, True, ), ( - {"USERNAME": "DeepThought", "PASSWORD": "**********"}, + {"credentials": {"username": "DeepThought", "password": "**********"}}, False, # hide secrets ), ], ) def test_model_dump_with_secrets(expected: dict, show_secrets: bool): - assert expected == model_dump_with_secrets(Credentials(USERNAME="DeepThought", PASSWORD=SecretStr("42")), show_secrets=show_secrets) + assert expected == model_dump_with_secrets( + Access( + 
credentials=Credentials(username="DeepThought", password=SecretStr("42")) + ), + show_secrets=show_secrets, + ) diff --git a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py index bf3f0787367..3bd1e318878 100644 --- a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py +++ b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py @@ -7,10 +7,10 @@ from typing import Any, AsyncIterator, Callable, get_args from unittest import mock -from common_library.json_serialization import json_dumps -from common_library.serialization import model_dump_with_secrets import pytest from _dask_helpers import DaskGatewayServer +from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets from distributed.deploy.spec import SpecCluster from faker import Faker from models_library.clusters import ( @@ -26,7 +26,6 @@ from pydantic import SecretStr from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict -from settings_library.utils_encoders import create_json_encoder_wo_secrets from simcore_postgres_database.models.clusters import ClusterType from simcore_service_director_v2.core.application import init_app from simcore_service_director_v2.core.errors import ( @@ -133,9 +132,10 @@ def creator(): SimpleAuthentication( username=faker.user_name(), password=SecretStr(local_dask_gateway_server.password), - ), show_secrets=True + ), + show_secrets=True, ) - ) + ), ) return creator From 5b64e8e9cc64aed4df445595b3d79490274ae1dc Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 11 Nov 2024 11:43:33 +0100 Subject: [PATCH 072/121] fix serialization --- .../common-library/src/common_library/serialization.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/common-library/src/common_library/serialization.py 
b/packages/common-library/src/common_library/serialization.py index b69e0f767ae..6a04389972b 100644 --- a/packages/common-library/src/common_library/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -1,8 +1,7 @@ from datetime import timedelta from typing import Any -from common_library.pydantic_fields_extension import get_type -from pydantic import BaseModel, SecretStr +from pydantic import BaseModel, SecretStr, TypeAdapter from pydantic_core import Url @@ -29,10 +28,10 @@ def model_dump_with_secrets( data[field_name] = str(field_data) elif isinstance(field_data, dict): - field_type = get_type(settings_obj.model_fields[field_name]) - if field_type and issubclass(field_type, BaseModel): + field_type = settings_obj.model_fields[field_name].annotation + if field_type: data[field_name] = model_dump_with_secrets( - field_type.model_validate(field_data), + TypeAdapter(field_type).validate_python(field_data), show_secrets=show_secrets, **pydantic_export_options, ) From 736064d91a6332d82e1b654e080833d6f056aa33 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 11 Nov 2024 12:41:16 +0100 Subject: [PATCH 073/121] fix validator --- .../models-library/src/models_library/clusters.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/models-library/src/models_library/clusters.py b/packages/models-library/src/models_library/clusters.py index 09540b8c16e..60e74b76b42 100644 --- a/packages/models-library/src/models_library/clusters.py +++ b/packages/models-library/src/models_library/clusters.py @@ -221,14 +221,14 @@ class Cluster(BaseCluster): }, ) - @model_validator(mode="before") + @model_validator(mode="after") @classmethod - def check_owner_has_access_rights(cls, values): - is_default_cluster = bool(values["id"] == DEFAULT_CLUSTER_ID) - owner_gid = values["owner"] + def _check_owner_has_access_rights(cls, values): + is_default_cluster = bool(values.id == DEFAULT_CLUSTER_ID) + owner_gid = 
values.owner # check owner is in the access rights, if not add it - access_rights = values.get("access_rights", values.get("accessRights", {})) + access_rights = values.access_rights or {} if owner_gid not in access_rights: access_rights[owner_gid] = ( CLUSTER_USER_RIGHTS if is_default_cluster else CLUSTER_ADMIN_RIGHTS @@ -239,5 +239,5 @@ def check_owner_has_access_rights(cls, values): ): msg = f"the cluster owner access rights are incorrectly set: {access_rights[owner_gid]}" raise ValueError(msg) - values["access_rights"] = access_rights + values.access_rights = access_rights return values From 87112f6fa8ec13b76d8eb71ced166d586713a5bd Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 11 Nov 2024 13:27:35 +0100 Subject: [PATCH 074/121] fix fields --- .../src/models_library/aiodocker_api.py | 20 +++++++--------- .../unit/with_dbs/test_api_route_clusters.py | 2 +- ...es_dynamic_sidecar_docker_service_specs.py | 24 +++++++++---------- 3 files changed, 22 insertions(+), 24 deletions(-) diff --git a/packages/models-library/src/models_library/aiodocker_api.py b/packages/models-library/src/models_library/aiodocker_api.py index 865570fbe36..4f1c86b8f4d 100644 --- a/packages/models-library/src/models_library/aiodocker_api.py +++ b/packages/models-library/src/models_library/aiodocker_api.py @@ -11,12 +11,13 @@ class AioDockerContainerSpec(ContainerSpec): - Env: dict[str, str | None] | None = Field( + env: dict[str, str | None] | None = Field( # type: ignore[assignment] default=None, - description="aiodocker expects here a dictionary and re-convert it back internally`.\n", + alias="Env", + description="aiodocker expects here a dictionary and re-convert it back internally", ) - @field_validator("Env", mode="before") + @field_validator("env", mode="before") @classmethod def convert_list_to_dict(cls, v): if v is not None and isinstance(v, list): @@ -33,7 +34,7 @@ def convert_list_to_dict(cls, v): class AioDockerResources1(Resources1): # NOTE: The Docker REST API 
documentation is wrong!!! # Do not set that back to singular Reservation. - Reservation: ResourceObject | None = Field( + reservation: ResourceObject | None = Field( None, description="Define resources reservation.", alias="Reservations" ) @@ -41,17 +42,14 @@ class AioDockerResources1(Resources1): class AioDockerTaskSpec(TaskSpec): - ContainerSpec: AioDockerContainerSpec | None = Field( - None, + container_spec: AioDockerContainerSpec | None = Field( + default=None, alias="ContainerSpec" ) - Resources: AioDockerResources1 | None = Field( - None, - description="Resource requirements which apply to each individual container created\nas part of the service.\n", - ) + resources: AioDockerResources1 | None = Field(default=None, alias="Resources") class AioDockerServiceSpec(ServiceSpec): - TaskTemplate: AioDockerTaskSpec | None = None + task_template: AioDockerTaskSpec | None = Field(default=None, alias="TaskTemplate") model_config = ConfigDict(populate_by_name=True, alias_generator=camel_to_snake) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py index 36c3c019d4e..19ab0ea2df3 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters.py @@ -745,7 +745,7 @@ async def test_ping_cluster( ) response = await async_client.post( "/v2/clusters:ping", - json=model_dump_with_secrets(valid_cluster, show_secrets=True, alias=True), + json=model_dump_with_secrets(valid_cluster, show_secrets=True, by_alias=True), ) response.raise_for_status() assert response.status_code == status.HTTP_204_NO_CONTENT diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index 47d4508c1c3..9e6a856ce4d 100644 --- 
a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -445,9 +445,9 @@ async def test_get_dynamic_proxy_spec( expected_dynamic_sidecar_spec_model = AioDockerServiceSpec.model_validate( expected_dynamic_sidecar_spec ) - assert expected_dynamic_sidecar_spec_model.TaskTemplate - assert expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec - assert expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec.Env + assert expected_dynamic_sidecar_spec_model.task_template + assert expected_dynamic_sidecar_spec_model.task_template.container_spec + assert expected_dynamic_sidecar_spec_model.task_template.container_spec.env for count in range(1, 11): # loop to check it does not repeat copies print(f"{count:*^50}") @@ -478,33 +478,33 @@ async def test_get_dynamic_proxy_spec( # NOTE: some flakiness here # state_exclude is a set and does not preserve order # when dumping to json it gets converted to a list - assert dynamic_sidecar_spec.TaskTemplate - assert dynamic_sidecar_spec.TaskTemplate.ContainerSpec - assert dynamic_sidecar_spec.TaskTemplate.ContainerSpec.Env - assert dynamic_sidecar_spec.TaskTemplate.ContainerSpec.Env[ + assert dynamic_sidecar_spec.task_template + assert dynamic_sidecar_spec.task_template.container_spec + assert dynamic_sidecar_spec.task_template.container_spec.env + assert dynamic_sidecar_spec.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] - dynamic_sidecar_spec.TaskTemplate.ContainerSpec.Env[ + dynamic_sidecar_spec.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] = json.dumps( sorted( json.loads( - dynamic_sidecar_spec.TaskTemplate.ContainerSpec.Env[ + dynamic_sidecar_spec.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] ) ) ) - assert expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec.Env[ + assert 
expected_dynamic_sidecar_spec_model.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] - expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec.Env[ + expected_dynamic_sidecar_spec_model.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] = json.dumps( sorted( json.loads( - expected_dynamic_sidecar_spec_model.TaskTemplate.ContainerSpec.Env[ + expected_dynamic_sidecar_spec_model.task_template.container_spec.env[ "DY_SIDECAR_STATE_EXCLUDE" ] ) From f795bcc8dc6dbd1b112d677a5c26fd2b063e68c3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 11 Nov 2024 13:33:43 +0100 Subject: [PATCH 075/121] fix serialization --- services/director-v2/tests/unit/with_dbs/conftest.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index f1f05f4fe06..a3234328c9f 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -6,7 +6,6 @@ import datetime -import json from collections.abc import Awaitable, Callable, Iterator from typing import Any, cast from uuid import uuid4 @@ -92,7 +91,9 @@ def creator( "inputs": ( { key: ( - value.model_dump(by_alias=True, exclude_unset=True) + value.model_dump( + mode="json", by_alias=True, exclude_unset=True + ) if isinstance(value, BaseModel) else value ) @@ -104,8 +105,8 @@ def creator( "outputs": ( { key: ( - json.loads( - value.model_dump_json(by_alias=True, exclude_unset=True) + value.model_dump( + mode="json", by_alias=True, exclude_unset=True ) if isinstance(value, BaseModel) else value From a9983b34f9e0585262733bfd316da29613c4bc8e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 11 Nov 2024 16:04:21 +0100 Subject: [PATCH 076/121] fix validator --- .../src/models_library/service_settings_labels.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index c60789028cd..7548c84478f 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -175,7 +175,7 @@ class PathMappingsLabel(BaseModel): description="folder path where the service is expected to provide all its outputs", ) state_paths: list[Path] = Field( - [], + default_factory=list, description="optional list of paths which contents need to be persisted", ) @@ -434,7 +434,7 @@ def _ensure_callbacks_mapping_container_names_defined_in_compose_spec( @field_validator("user_preferences_path", mode="before") @classmethod def _deserialize_from_json(cls, v): - return f"{v}".removeprefix('"').removesuffix('"') + return f"{v}".removeprefix('"').removesuffix('"') if v else None @field_validator("user_preferences_path") @classmethod From 845867d504818859bde4ec62eedeb59c9f72b0ec Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 11 Nov 2024 16:11:38 +0100 Subject: [PATCH 077/121] fix test --- ...es_dynamic_sidecar_docker_service_specs.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index 9e6a856ce4d..16032677a98 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -251,19 +251,19 @@ def expected_dynamic_sidecar_spec( "NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS": "3", "DYNAMIC_SIDECAR_LOG_LEVEL": "DEBUG", "DY_DEPLOYMENT_REGISTRY_SETTINGS": ( - '{"REGISTRY_AUTH": false, "REGISTRY_PATH": null, ' - '"REGISTRY_URL": "foo.bar.com", 
"REGISTRY_USER": ' - '"test", "REGISTRY_PW": "test", "REGISTRY_SSL": false}' + '{"REGISTRY_AUTH":false,"REGISTRY_PATH":null,' + '"REGISTRY_URL":"foo.bar.com","REGISTRY_USER":' + '"test","REGISTRY_PW":"test","REGISTRY_SSL":false}' ), "DY_DOCKER_HUB_REGISTRY_SETTINGS": "null", "DY_SIDECAR_AWS_S3_CLI_SETTINGS": ( - '{"AWS_S3_CLI_S3": {"S3_ACCESS_KEY": "12345678", "S3_BUCKET_NAME": "simcore", ' - '"S3_ENDPOINT": "http://172.17.0.1:9001", "S3_REGION": "us-east-1", "S3_SECRET_KEY": "12345678"}}' + '{"AWS_S3_CLI_S3":{"S3_ACCESS_KEY":"12345678","S3_BUCKET_NAME":"simcore",' + '"S3_ENDPOINT":"http://172.17.0.1:9001/","S3_REGION":"us-east-1","S3_SECRET_KEY":"12345678"}}' ), "DY_SIDECAR_CALLBACKS_MAPPING": ( - '{"metrics": {"service": "rt-web", "command": "ls", "timeout": 1.0}, "before_shutdown"' - ': [{"service": "rt-web", "command": "ls", "timeout": 1.0}, {"service": "s4l-core", ' - '"command": ["ls", "-lah"], "timeout": 1.0}], "inactivity": null}' + '{"metrics":{"service":"rt-web","command":"ls","timeout":1.0},"before_shutdown"' + ':[{"service":"rt-web","command":"ls","timeout":1.0},{"service":"s4l-core",' + '"command":["ls","-lah"],"timeout":1.0}],"inactivity":null}' ), "DY_SIDECAR_SERVICE_KEY": "simcore/services/dynamic/3dviewer", "DY_SIDECAR_SERVICE_VERSION": "2.4.5", @@ -579,13 +579,13 @@ async def test_merge_dynamic_sidecar_specs_with_user_specific_specs( for key in ["DY_SIDECAR_STATE_EXCLUDE", "DY_SIDECAR_STATE_PATHS"]: # this is a json of a list assert isinstance( - sorted_dict["TaskTemplate"]["ContainerSpec"]["Env"][key], str + sorted_dict["task_template"]["container_spec"]["env"][key], str ) unsorted_list = json.loads( - sorted_dict["TaskTemplate"]["ContainerSpec"]["Env"][key] + sorted_dict["task_template"]["container_spec"]["env"][key] ) assert isinstance(unsorted_list, list) - sorted_dict["TaskTemplate"]["ContainerSpec"]["Env"][key] = json.dumps( + sorted_dict["task_template"]["container_spec"]["env"][key] = json.dumps( unsorted_list.sort() ) assert 
dynamic_sidecar_spec_dict == expected_dynamic_sidecar_spec_dict From 469c228d59804164611227031e9bf7822d3179ca Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 11 Nov 2024 16:27:31 +0100 Subject: [PATCH 078/121] fix --- .../tests/unit/with_dbs/test_api_route_clusters_details.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py index 2029b8a4dd3..5dd1abaa594 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_clusters_details.py @@ -155,7 +155,7 @@ async def test_get_cluster_details( authentication=SimpleAuthentication( username=gateway_username, password=SecretStr(local_dask_gateway_server.password), - ).model_dump(mode="json", by_alias=True), + ).model_dump(by_alias=True), ) # in its present state, the cluster should have no workers cluster_out = await _get_cluster_details( From f3e4857f5fff80c3e167fe699f49027ceed5b4b4 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 11 Nov 2024 16:31:35 +0100 Subject: [PATCH 079/121] fix typo --- ...t_modules_comp_scheduler_dask_scheduler.py | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index 2cbe93510d1..8567c8ccca0 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -737,7 +737,9 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.reset_mock() mocked_dask_client.get_task_result.assert_not_called() messages = await 
_assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.model_validate_json + instrumentation_rabbit_client_parser, + 1, + InstrumentationRabbitMessage.model_validate_json, ) assert messages[0].metrics == "service_started" assert messages[0].service_uuid == exp_started_task.node_id @@ -780,7 +782,9 @@ async def _return_random_task_result(job_id) -> TaskOutputData: expected_progress=1, ) messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.model_validate_jsonidate_jsonidate_json + instrumentation_rabbit_client_parser, + 1, + InstrumentationRabbitMessage.model_validate_json, ) assert messages[0].metrics == "service_stopped" assert messages[0].service_uuid == exp_started_task.node_id @@ -882,7 +886,9 @@ async def _return_2nd_task_running(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.reset_mock() mocked_dask_client.get_task_result.assert_not_called() messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.model_validate_json + instrumentation_rabbit_client_parser, + 1, + InstrumentationRabbitMessage.model_validate_json, ) assert messages[0].metrics == "service_started" assert messages[0].service_uuid == exp_started_task.node_id @@ -926,7 +932,9 @@ async def _return_2nd_task_failed(job_ids: list[str]) -> list[DaskClientTaskStat mocked_parse_output_data_fct.assert_not_called() expected_pending_tasks.remove(exp_started_task) messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 1, InstrumentationRabbitMessage.model_validate_json + instrumentation_rabbit_client_parser, + 1, + InstrumentationRabbitMessage.model_validate_json, ) assert messages[0].metrics == "service_stopped" assert messages[0].service_uuid == exp_started_task.node_id @@ -970,7 +978,9 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta ) 
mocked_dask_client.get_task_result.assert_called_once_with(exp_started_task.job_id) messages = await _assert_message_received( - instrumentation_rabbit_client_parser, 2, InstrumentationRabbitMessage.model_validate_json + instrumentation_rabbit_client_parser, + 2, + InstrumentationRabbitMessage.model_validate_json, ) # NOTE: the service was fast and went directly to success assert messages[0].metrics == "service_started" From fa9028c815ef7faa0ccab99074770f3d86327575 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Mon, 11 Nov 2024 19:47:48 +0100 Subject: [PATCH 080/121] fix test --- .../src/models_library/service_settings_labels.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index 7548c84478f..7c14abf65b0 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -276,7 +276,7 @@ class DynamicSidecarServiceLabels(BaseModel): ), ) - compose_spec: Json[ComposeSpecLabelDict | None] = Field( + compose_spec: Json[ComposeSpecLabelDict | None] | None = Field( None, alias="simcore.service.compose-spec", description=( From b8b8c68a739eadc3c2fefefe5a88a6d8c1be8483 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 10:44:03 +0100 Subject: [PATCH 081/121] fix typecheck --- .../src/simcore_service_director_v2/utils/dask.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index cb47232cc9b..7c70b3d25e6 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -2,7 +2,7 @@ import collections import logging from collections.abc import 
Awaitable, Callable, Coroutine, Generator -from typing import Any, Final, NoReturn, Optional, ParamSpec, TypeVar, cast, get_args +from typing import Any, Final, NoReturn, ParamSpec, TypeVar, cast, get_args from uuid import uuid4 import dask_gateway # type: ignore[import-untyped] @@ -61,7 +61,7 @@ ServiceKeyStr = str ServiceVersionStr = str -_PVType = Optional[_NPItemValue] +_PVType = _NPItemValue | None assert len(get_args(_PVType)) == len( # nosec get_args(PortValue) @@ -73,7 +73,7 @@ def _get_port_validation_errors(port_key: str, err: ValidationError) -> list[Err for error in errors: assert error["loc"][-1] != (port_key,) error["loc"] = error["loc"] + (port_key,) - return errors + return list(errors) def generate_dask_job_id( @@ -134,7 +134,7 @@ async def create_node_ports( db_manager=db_manager, ) except ValidationError as err: - raise PortsValidationError(project_id, node_id, err.errors()) from err + raise PortsValidationError(project_id, node_id, list(err.errors())) from err async def parse_output_data( From 189d84ce733039359f4e85896b3eec31e21ab1e6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 11:49:49 +0100 Subject: [PATCH 082/121] fix test --- .../utils/computations.py | 8 ++- .../with_dbs/test_api_route_computations.py | 64 +++++++++++-------- 2 files changed, 44 insertions(+), 28 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index 56fb24170ef..5ec5ae25980 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -1,4 +1,4 @@ -import datetime +import datetime as dt import logging from typing import Any @@ -127,11 +127,13 @@ async def find_deprecated_tasks( ): details for details in services_details } - today = datetime.datetime.now(tz=datetime.timezone.utc) + today = 
dt.datetime.now(tz=dt.UTC) def _is_service_deprecated(service: dict[str, Any]) -> bool: if deprecation_date := service.get("deprecated"): - deprecation_date = datetime.datetime.fromisoformat(deprecation_date).replace(tzinfo=datetime.UTC) + deprecation_date = dt.datetime.fromisoformat(deprecation_date).replace( + tzinfo=dt.UTC + ) is_deprecated: bool = today > deprecation_date return is_deprecated return False diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 8136c4bd126..4142a898025 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -6,7 +6,7 @@ # pylint: disable=unused-variable # pylint:disable=too-many-positional-arguments -import datetime +import datetime as dt import json import re import urllib.parse @@ -23,6 +23,7 @@ import respx from faker import Faker from fastapi import FastAPI, status +from models_library.api_schemas_catalog.services import ServiceGet from models_library.api_schemas_clusters_keeper.ec2_instances import EC2InstanceTypeGet from models_library.api_schemas_directorv2.comp_tasks import ( ComputationCreate, @@ -33,7 +34,6 @@ PricingPlanGet, PricingUnitGet, ) -from models_library.basic_types import VersionStr from models_library.clusters import DEFAULT_CLUSTER_ID, Cluster, ClusterID from models_library.projects import ProjectAtDB from models_library.projects_nodes import NodeID, NodeState @@ -111,8 +111,8 @@ def fake_service_details(mocks_dir: Path) -> ServiceMetaDataPublished: @pytest.fixture def fake_service_extras() -> ServiceExtras: - extra_example = ServiceExtras.model_config["json_schema_extra"]["examples"][2] - random_extras = ServiceExtras(**extra_example) + extra_example = ServiceExtras.model_config["json_schema_extra"]["examples"][2] # type: ignore + random_extras = ServiceExtras(**extra_example) # type: 
ignore assert random_extras is not None return random_extras @@ -120,15 +120,15 @@ def fake_service_extras() -> ServiceExtras: @pytest.fixture def fake_service_resources() -> ServiceResourcesDict: return TypeAdapter(ServiceResourcesDict).validate_python( - ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], + ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"][0], # type: ignore ) @pytest.fixture def fake_service_labels() -> dict[str, Any]: - return choice( - SimcoreServiceLabels.model_config["json_schema_extra"]["examples"] - ) # noqa: S311 + return choice( # noqa: S311 + SimcoreServiceLabels.model_config["json_schema_extra"]["examples"] # type: ignore + ) @pytest.fixture @@ -149,7 +149,9 @@ def mocked_director_service_fcts( r"/services/simcore%2Fservices%2F(comp|dynamic|frontend)%2F[^/]+/\d+.\d+.\d+$" ), name="get_service", - ).respond(json={"data": [fake_service_details.model_dump(mode="json", by_alias=True)]}) + ).respond( + json={"data": [fake_service_details.model_dump(mode="json", by_alias=True)]} + ) respx_mock.get( re.compile( r"/services/simcore%2Fservices%2F(comp|dynamic|frontend)%2F[^/]+/\d+.\d+.\d+/labels" @@ -162,7 +164,9 @@ def mocked_director_service_fcts( r"/service_extras/(simcore)%2F(services)%2F(comp|dynamic|frontend)%2F.+/(.+)" ), name="get_service_extras", - ).respond(json={"data": fake_service_extras.model_dump(mode="json", by_alias=True)}) + ).respond( + json={"data": fake_service_extras.model_dump(mode="json", by_alias=True)} + ) yield respx_mock @@ -184,7 +188,7 @@ def _mocked_services_details( return httpx.Response( 200, json=jsonable_encoder( - fake_service_details.copy( + fake_service_details.model_copy( update={ "key": urllib.parse.unquote(service_key), "version": service_version, @@ -225,19 +229,30 @@ def mocked_catalog_service_fcts_deprecated( def _mocked_services_details( request, service_key: str, service_version: str ) -> httpx.Response: + data_published = 
fake_service_details.model_copy( + update={ + "key": urllib.parse.unquote(service_key), + "version": service_version, + "deprecated": ( + dt.datetime.now(tz=dt.UTC) - dt.timedelta(days=1) + ).isoformat(), + } + ).model_dump(by_alias=True) + + deprecated = { + "deprecated": ( + dt.datetime.now(tz=dt.UTC) - dt.timedelta(days=1) + ).isoformat() + } + + data = {**ServiceGet.model_config["json_schema_extra"]["examples"][0], **data_published, **deprecated} # type: ignore + + payload = ServiceGet.model_validate(data) + return httpx.Response( - 200, + httpx.codes.OK, json=jsonable_encoder( - fake_service_details.copy( - update={ - "key": urllib.parse.unquote(service_key), - "version": service_version, - "deprecated": ( - datetime.datetime.now(tz=datetime.UTC) - - datetime.timedelta(days=1) - ).isoformat(), - } - ), + payload, by_alias=True, ), ) @@ -470,7 +485,6 @@ def project_nodes_overrides(request: pytest.FixtureRequest) -> dict[str, Any]: return request.param -@pytest.mark.testit async def test_create_computation_with_wallet( minimal_configuration: None, mocked_director_service_fcts: respx.MockRouter, @@ -618,8 +632,8 @@ async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_rai @pytest.mark.parametrize( "default_pricing_plan", [ - PricingPlanGet.construct( - **PricingPlanGet.model_config["json_schema_extra"]["examples"][0] + PricingPlanGet.model_construct( + **PricingPlanGet.model_config["json_schema_extra"]["examples"][0] # type: ignore ) ], ) From 06813106488713df6583a79b517e5367266a4025 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 12:24:52 +0100 Subject: [PATCH 083/121] fix deprecated --- .../modules/db/repositories/comp_tasks/_utils.py | 2 +- .../dynamic_sidecar/docker_compose_specs.py | 2 +- .../scheduler/_core/_event_create_sidecars.py | 4 ++-- .../src/simcore_service_director_v2/utils/dask.py | 2 +- .../simcore_service_director_v2/utils/rabbitmq.py | 10 +++++----- .../director-v2/tests/unit/test_utils_dags.py | 15 
++++++++------- .../unit/with_dbs/test_api_route_computations.py | 2 +- 7 files changed, 19 insertions(+), 18 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index ef63ff4b823..bdb64cbbf99 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -89,7 +89,7 @@ async def _get_service_details( node.version, product_name, ) - obj: ServiceMetaDataPublished = ServiceMetaDataPublished.construct( + obj: ServiceMetaDataPublished = ServiceMetaDataPublished.model_construct( **service_details ) return obj diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py index b9d38ca6502..98ba1ea2f40 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py @@ -239,7 +239,7 @@ def _update_container_labels( spec_service_key, default_limits ) - label_keys = StandardSimcoreDockerLabels.construct( + label_keys = StandardSimcoreDockerLabels.model_construct( user_id=user_id, project_id=project_id, node_id=node_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py index a9a5af803c8..166d4562186 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py +++ 
b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py @@ -254,7 +254,7 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: include=_DYNAMIC_SIDECAR_SERVICE_EXTENDABLE_SPECS, ) ) - rabbit_message = ProgressRabbitMessageNode.construct( + rabbit_message = ProgressRabbitMessageNode.model_construct( user_id=scheduler_data.user_id, project_id=scheduler_data.project_id, node_id=scheduler_data.node_uuid, @@ -272,7 +272,7 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: ) ) - rabbit_message = ProgressRabbitMessageNode.construct( + rabbit_message = ProgressRabbitMessageNode.model_construct( user_id=scheduler_data.user_id, project_id=scheduler_data.project_id, node_id=scheduler_data.node_uuid, diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index 7c70b3d25e6..36075eb3bf3 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -314,7 +314,7 @@ def compute_task_labels( ValidationError """ product_name = run_metadata.get("product_name", UNDEFINED_DOCKER_LABEL) - standard_simcore_labels = StandardSimcoreDockerLabels.construct( + standard_simcore_labels = StandardSimcoreDockerLabels.model_construct( user_id=user_id, project_id=project_id, node_id=node_id, diff --git a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py index 57d014a3c0f..70249d3c1da 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py @@ -31,7 +31,7 @@ async def publish_service_started_metrics( simcore_user_agent: str, task: CompTaskAtDB, ) -> None: - message = 
InstrumentationRabbitMessage.construct( + message = InstrumentationRabbitMessage.model_construct( metrics="service_started", user_id=user_id, project_id=task.project_id, @@ -53,7 +53,7 @@ async def publish_service_stopped_metrics( task: CompTaskAtDB, task_final_state: RunningState, ) -> None: - message = InstrumentationRabbitMessage.construct( + message = InstrumentationRabbitMessage.model_construct( metrics="service_stopped", user_id=user_id, project_id=task.project_id, @@ -153,7 +153,7 @@ async def publish_service_log( log: str, log_level: LogLevelInt, ) -> None: - message = LoggerRabbitMessage.construct( + message = LoggerRabbitMessage.model_construct( user_id=user_id, project_id=project_id, node_id=node_id, @@ -172,7 +172,7 @@ async def publish_service_progress( node_id: NodeID, progress: NonNegativeFloat, ) -> None: - message = ProgressRabbitMessageNode.construct( + message = ProgressRabbitMessageNode.model_construct( user_id=user_id, project_id=project_id, node_id=node_id, @@ -188,7 +188,7 @@ async def publish_project_log( log: str, log_level: LogLevelInt, ) -> None: - message = LoggerRabbitMessage.construct( + message = LoggerRabbitMessage.model_construct( user_id=user_id, project_id=project_id, node_id=None, diff --git a/services/director-v2/tests/unit/test_utils_dags.py b/services/director-v2/tests/unit/test_utils_dags.py index 72e0383e0a4..125f3153db4 100644 --- a/services/director-v2/tests/unit/test_utils_dags.py +++ b/services/director-v2/tests/unit/test_utils_dags.py @@ -476,7 +476,7 @@ def pipeline_test_params( for x in range(_MANY_NODES) }, [ - CompTaskAtDB.construct( + CompTaskAtDB.model_construct( project_id=uuid4(), node_id=f"node_{x}", schema=NodeSchema(inputs={}, outputs={}), @@ -493,7 +493,7 @@ def pipeline_test_params( ) for x in range(_MANY_NODES) ], - PipelineDetails.construct( + PipelineDetails.model_construct( adjacency_list={f"node_{x}": [] for x in range(_MANY_NODES)}, progress=1.0, node_states={ @@ -527,7 +527,7 @@ def 
pipeline_test_params( }, [ # NOTE: we use construct here to be able to use non uuid names to simplify test setup - CompTaskAtDB.construct( + CompTaskAtDB.model_construct( project_id=uuid4(), node_id="node_1", schema=NodeSchema(inputs={}, outputs={}), @@ -541,7 +541,7 @@ def pipeline_test_params( modified=datetime.datetime.now(tz=datetime.timezone.utc), last_heartbeat=None, ), - CompTaskAtDB.construct( + CompTaskAtDB.model_construct( project_id=uuid4(), node_id="node_2", schema=NodeSchema(inputs={}, outputs={}), @@ -555,7 +555,7 @@ def pipeline_test_params( modified=datetime.datetime.now(tz=datetime.timezone.utc), last_heartbeat=None, ), - CompTaskAtDB.construct( + CompTaskAtDB.model_construct( project_id=uuid4(), node_id="node_3", schema=NodeSchema(inputs={}, outputs={}), @@ -571,7 +571,7 @@ def pipeline_test_params( progress=1.00, ), ], - PipelineDetails.construct( + PipelineDetails.model_construct( adjacency_list={ "node_1": ["node_2", "node_3"], "node_2": ["node_3"], @@ -597,5 +597,6 @@ async def test_compute_pipeline_details( pipeline_test_params.comp_tasks, ) assert ( - received_details.model_dump() == pipeline_test_params.expected_pipeline_details.model_dump() + received_details.model_dump() + == pipeline_test_params.expected_pipeline_details.model_dump() ) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index 4142a898025..add9c4d77d3 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -179,7 +179,7 @@ def mocked_catalog_service_fcts( ) -> Iterator[respx.MockRouter]: def _mocked_service_resources(request) -> httpx.Response: return httpx.Response( - 200, json=jsonable_encoder(fake_service_resources, by_alias=True) + httpx.codes.OK, json=jsonable_encoder(fake_service_resources, by_alias=True) ) def _mocked_services_details( From 
ccfe07f36a901aecaeab0885bf0fc3e23c06cab7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 13:16:42 +0100 Subject: [PATCH 084/121] revert endpoint type --- .../dynamic_sidecar/api_client/_public.py | 56 ++++++++++--------- .../dynamic_sidecar/api_client/_thin.py | 55 +++++++++--------- 2 files changed, 61 insertions(+), 50 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py index c83b33c2299..5945e07b8e3 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py @@ -81,7 +81,7 @@ def _dynamic_services_scheduler_settings(self) -> DynamicServicesSchedulerSettin return settings async def is_healthy( - self, dynamic_sidecar_endpoint: str, *, with_retry: bool = True + self, dynamic_sidecar_endpoint: AnyHttpUrl, *, with_retry: bool = True ) -> bool: """returns True if service is UP and running else False""" try: @@ -97,7 +97,9 @@ async def is_healthy( except BaseHttpClientError: return False - async def containers_inspect(self, dynamic_sidecar_endpoint: str) -> dict[str, Any]: + async def containers_inspect( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> dict[str, Any]: """ returns dict containing docker inspect result form all dynamic-sidecar started containers @@ -110,7 +112,7 @@ async def containers_inspect(self, dynamic_sidecar_endpoint: str) -> dict[str, A @log_decorator(logger=_logger) async def containers_docker_status( - self, dynamic_sidecar_endpoint: str + self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> dict[str, dict[str, str]]: try: response = await self._thin_client.get_containers( @@ -124,7 +126,7 @@ async def containers_docker_status( @log_decorator(logger=_logger) async def toggle_service_ports_io( self, 
- dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, *, enable_outputs: bool, enable_inputs: bool, @@ -137,7 +139,7 @@ async def toggle_service_ports_io( @log_decorator(logger=_logger) async def service_outputs_create_dirs( - self, dynamic_sidecar_endpoint: str, outputs_labels: dict[str, Any] + self, dynamic_sidecar_endpoint: AnyHttpUrl, outputs_labels: dict[str, Any] ) -> None: await self._thin_client.post_containers_ports_outputs_dirs( dynamic_sidecar_endpoint, outputs_labels=outputs_labels @@ -145,7 +147,7 @@ async def service_outputs_create_dirs( @log_decorator(logger=_logger) async def get_entrypoint_container_name( - self, dynamic_sidecar_endpoint: str, dynamic_sidecar_network_name: str + self, dynamic_sidecar_endpoint: AnyHttpUrl, dynamic_sidecar_network_name: str ) -> str: """ While this API raises EntrypointContainerNotFoundError @@ -169,7 +171,7 @@ async def get_entrypoint_container_name( async def _attach_container_to_network( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, container_id: str, network_id: str, network_aliases: list[str], @@ -183,7 +185,7 @@ async def _attach_container_to_network( ) async def _detach_container_from_network( - self, dynamic_sidecar_endpoint: str, container_id: str, network_id: str + self, dynamic_sidecar_endpoint: AnyHttpUrl, container_id: str, network_id: str ) -> None: """detaches a container from a network if not already detached""" await self._thin_client.post_containers_networks_detach( @@ -192,7 +194,7 @@ async def _detach_container_from_network( async def attach_service_containers_to_project_network( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, dynamic_sidecar_network_name: str, project_network: str, project_id: ProjectID, @@ -248,7 +250,7 @@ async def attach_service_containers_to_project_network( async def detach_service_containers_from_project_network( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, 
project_network: str, project_id: ProjectID, ) -> None: @@ -280,14 +282,14 @@ async def detach_service_containers_from_project_network( async def submit_docker_compose_spec( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, compose_spec: str, ) -> None: await self._thin_client.post_containers_compose_spec( dynamic_sidecar_endpoint, compose_spec=compose_spec ) - def _get_client(self, dynamic_sidecar_endpoint: str) -> Client: + def _get_client(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> Client: return Client( app=self._app, async_client=self._async_client, @@ -297,7 +299,7 @@ def _get_client(self, dynamic_sidecar_endpoint: str) -> Client: async def _await_for_result( self, task_id: TaskId, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, task_timeout: PositiveFloat, progress_callback: ProgressCallback | None = None, ) -> Any | None: @@ -313,7 +315,7 @@ async def _await_for_result( async def create_containers( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, metrics_params: CreateServiceMetricsAdditionalParams, progress_callback: ProgressCallback | None = None, ) -> None: @@ -332,7 +334,7 @@ async def create_containers( async def stop_service( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, progress_callback: ProgressCallback | None = None, ) -> None: response = await self._thin_client.post_containers_tasks_down( @@ -347,7 +349,7 @@ async def stop_service( progress_callback, ) - async def restore_service_state(self, dynamic_sidecar_endpoint: str) -> int: + async def restore_service_state(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> int: response = await self._thin_client.post_containers_tasks_state_restore( dynamic_sidecar_endpoint ) @@ -362,7 +364,9 @@ async def restore_service_state(self, dynamic_sidecar_endpoint: str) -> int: assert isinstance(result, int) # nosec return result - async def pull_user_services_images(self, dynamic_sidecar_endpoint: 
str) -> None: + async def pull_user_services_images( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> None: response = await self._thin_client.post_containers_images_pull( dynamic_sidecar_endpoint ) @@ -377,7 +381,7 @@ async def pull_user_services_images(self, dynamic_sidecar_endpoint: str) -> None async def save_service_state( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, progress_callback: ProgressCallback | None = None, ) -> int: response = await self._thin_client.post_containers_tasks_state_save( @@ -396,7 +400,7 @@ async def save_service_state( async def pull_service_input_ports( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, port_keys: list[ServicePortKey] | None = None, ) -> int: response = await self._thin_client.post_containers_tasks_ports_inputs_pull( @@ -414,7 +418,7 @@ async def pull_service_input_ports( async def pull_service_output_ports( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, port_keys: list[str] | None = None, ) -> int: response = await self._thin_client.post_containers_tasks_ports_outputs_pull( @@ -433,7 +437,7 @@ async def pull_service_output_ports( async def push_service_output_ports( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, progress_callback: ProgressCallback | None = None, ) -> None: response = await self._thin_client.post_containers_tasks_ports_outputs_push( @@ -448,7 +452,7 @@ async def push_service_output_ports( progress_callback, ) - async def restart_containers(self, dynamic_sidecar_endpoint: str) -> None: + async def restart_containers(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> None: response = await self._thin_client.post_containers_tasks_restart( dynamic_sidecar_endpoint ) @@ -463,7 +467,7 @@ async def restart_containers(self, dynamic_sidecar_endpoint: str) -> None: async def update_volume_state( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, 
volume_category: VolumeCategory, volume_status: VolumeStatus, ) -> None: @@ -485,7 +489,7 @@ async def configure_proxy( await self._thin_client.proxy_config_load(proxy_endpoint, proxy_configuration) async def get_service_activity( - self, dynamic_sidecar_endpoint: str + self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> ActivityInfoOrNone: response = await self._thin_client.get_containers_activity( dynamic_sidecar_endpoint @@ -495,7 +499,9 @@ async def get_service_activity( ActivityInfo.model_validate(decoded_response) if decoded_response else None ) - async def free_reserved_disk_space(self, dynamic_sidecar_endpoint: str) -> None: + async def free_reserved_disk_space( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> None: await self._thin_client.post_disk_reserved_free(dynamic_sidecar_endpoint) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py index 5cc771b7316..d992ba75bc3 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py @@ -5,6 +5,7 @@ from httpx import Response, Timeout from models_library.services_creation import CreateServiceMetricsAdditionalParams from models_library.sidecar_volumes import VolumeCategory, VolumeStatus +from pydantic import AnyHttpUrl from servicelib.docker_constants import SUFFIX_EGRESS_PROXY_NAME from servicelib.fastapi.http_client_thin import ( BaseThinClient, @@ -56,7 +57,7 @@ def __init__(self, app: FastAPI): def _get_url( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, postfix: str, *, no_api_version: bool = False, @@ -65,23 +66,27 @@ def _get_url( api_version = "" if no_api_version else f"{self.API_VERSION}/" return f"{dynamic_sidecar_endpoint}{api_version}{postfix}" - async def 
_get_health_common(self, dynamic_sidecar_endpoint: str) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "health", no_api_version=True) + async def _get_health_common( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/health", no_api_version=True) return await self.client.get(url, timeout=self._health_request_timeout) @retry_on_errors() @expect_status(status.HTTP_200_OK) - async def get_health(self, dynamic_sidecar_endpoint: str) -> Response: + async def get_health(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> Response: return await self._get_health_common(dynamic_sidecar_endpoint) @expect_status(status.HTTP_200_OK) - async def get_health_no_retry(self, dynamic_sidecar_endpoint: str) -> Response: + async def get_health_no_retry( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> Response: return await self._get_health_common(dynamic_sidecar_endpoint) @retry_on_errors() @expect_status(status.HTTP_200_OK) async def get_containers( - self, dynamic_sidecar_endpoint: str, *, only_status: bool + self, dynamic_sidecar_endpoint: AnyHttpUrl, *, only_status: bool ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers") return await self.client.get(url, params={"only_status": only_status}) @@ -90,7 +95,7 @@ async def get_containers( @expect_status(status.HTTP_204_NO_CONTENT) async def patch_containers_ports_io( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, *, enable_outputs: bool, enable_inputs: bool, @@ -103,7 +108,7 @@ async def patch_containers_ports_io( @retry_on_errors() @expect_status(status.HTTP_204_NO_CONTENT) async def post_containers_ports_outputs_dirs( - self, dynamic_sidecar_endpoint: str, *, outputs_labels: dict[str, Any] + self, dynamic_sidecar_endpoint: AnyHttpUrl, *, outputs_labels: dict[str, Any] ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs/dirs") return await self.client.post(url, 
json={"outputs_labels": outputs_labels}) @@ -111,7 +116,7 @@ async def post_containers_ports_outputs_dirs( @retry_on_errors() @expect_status(status.HTTP_200_OK) async def get_containers_name( - self, dynamic_sidecar_endpoint: str, *, dynamic_sidecar_network_name: str + self, dynamic_sidecar_endpoint: AnyHttpUrl, *, dynamic_sidecar_network_name: str ) -> Response: filters = json.dumps( { @@ -128,7 +133,7 @@ async def get_containers_name( @expect_status(status.HTTP_204_NO_CONTENT) async def post_containers_networks_attach( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, *, container_id: str, network_id: str, @@ -147,7 +152,7 @@ async def post_containers_networks_attach( @expect_status(status.HTTP_204_NO_CONTENT) async def post_containers_networks_detach( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, *, container_id: str, network_id: str, @@ -165,7 +170,7 @@ async def post_containers_networks_detach( @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_compose_spec( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, *, compose_spec: str, ) -> Response: @@ -176,7 +181,7 @@ async def post_containers_compose_spec( @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, *, metrics_params: CreateServiceMetricsAdditionalParams, ) -> Response: @@ -188,7 +193,7 @@ async def post_containers_tasks( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_down( - self, dynamic_sidecar_endpoint: str + self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers:down") return await self.client.post(url) @@ -196,7 +201,7 @@ async def post_containers_tasks_down( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_state_restore( - self, 
dynamic_sidecar_endpoint: str + self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/state:restore") return await self.client.post(url) @@ -204,7 +209,7 @@ async def post_containers_tasks_state_restore( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_state_save( - self, dynamic_sidecar_endpoint: str + self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/state:save") return await self.client.post(url) @@ -212,7 +217,7 @@ async def post_containers_tasks_state_save( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_images_pull( - self, dynamic_sidecar_endpoint: str + self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/images:pull") return await self.client.post(url) @@ -221,7 +226,7 @@ async def post_containers_images_pull( @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_ports_inputs_pull( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, port_keys: list[str] | None = None, ) -> Response: port_keys = [] if port_keys is None else port_keys @@ -232,7 +237,7 @@ async def post_containers_tasks_ports_inputs_pull( @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_ports_outputs_pull( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, port_keys: list[str] | None = None, ) -> Response: port_keys = [] if port_keys is None else port_keys @@ -242,7 +247,7 @@ async def post_containers_tasks_ports_outputs_pull( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_ports_outputs_push( - self, dynamic_sidecar_endpoint: str + self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/ports/outputs:push") return await 
self.client.post(url) @@ -250,7 +255,7 @@ async def post_containers_tasks_ports_outputs_push( @retry_on_errors() @expect_status(status.HTTP_202_ACCEPTED) async def post_containers_tasks_restart( - self, dynamic_sidecar_endpoint: str + self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers:restart") return await self.client.post(url) @@ -259,7 +264,7 @@ async def post_containers_tasks_restart( @expect_status(status.HTTP_204_NO_CONTENT) async def put_volumes( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, volume_category: VolumeCategory, volume_status: VolumeStatus, ) -> Response: @@ -270,7 +275,7 @@ async def put_volumes( @retry_on_errors() @expect_status(status.HTTP_200_OK) async def proxy_config_load( - self, proxy_endpoint: str, proxy_configuration: dict[str, Any] + self, proxy_endpoint: AnyHttpUrl, proxy_configuration: dict[str, Any] ) -> Response: url = self._get_url(proxy_endpoint, "load", no_api_version=True) return await self.client.post(url, json=proxy_configuration) @@ -279,7 +284,7 @@ async def proxy_config_load( @expect_status(status.HTTP_200_OK) async def get_containers_activity( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "containers/activity") return await self.client.get(url) @@ -288,7 +293,7 @@ async def get_containers_activity( @expect_status(status.HTTP_204_NO_CONTENT) async def post_disk_reserved_free( self, - dynamic_sidecar_endpoint: str, + dynamic_sidecar_endpoint: AnyHttpUrl, ) -> Response: url = self._get_url(dynamic_sidecar_endpoint, "disk/reserved:free") return await self.client.post(url) From 90f887e9dd44af8e5eff56e1490bd1d68b155efa Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 13:27:19 +0100 Subject: [PATCH 085/121] fix url path --- .../modules/dynamic_sidecar/api_client/_thin.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py index d992ba75bc3..21ef1bbe279 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py @@ -69,7 +69,7 @@ def _get_url( async def _get_health_common( self, dynamic_sidecar_endpoint: AnyHttpUrl ) -> Response: - url = self._get_url(dynamic_sidecar_endpoint, "/health", no_api_version=True) + url = self._get_url(dynamic_sidecar_endpoint, "health", no_api_version=True) return await self.client.get(url, timeout=self._health_request_timeout) @retry_on_errors() From 0a0016f295ee76bf06de86fee85d91d36b81e98a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 13:31:46 +0100 Subject: [PATCH 086/121] Update services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py Co-authored-by: Andrei Neagu <5694077+GitHK@users.noreply.github.com> --- .../src/simcore_service_director_v2/utils/osparc_variables.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py b/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py index 5a3321510e6..6704c8369ef 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/osparc_variables.py @@ -89,7 +89,7 @@ def copy( return {k: self._variables_getters[k] for k in selection} -_HANDLERS_TIMEOUT: Final[NonNegativeInt] = TypeAdapter(NonNegativeInt).validate_python(4) +_HANDLERS_TIMEOUT: Final[NonNegativeInt] = 4 async def resolve_variables_from_context( From f10922f635e5abe03336485c712c56f69d4a1772 Mon Sep 17 00:00:00 2001 From: Giancarlo 
Romeo Date: Tue, 12 Nov 2024 13:35:46 +0100 Subject: [PATCH 087/121] set specific type: ignore --- .../src/simcore_service_director_v2/models/pricing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/pricing.py b/services/director-v2/src/simcore_service_director_v2/models/pricing.py index 35ff94c744d..43ade424954 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/pricing.py +++ b/services/director-v2/src/simcore_service_director_v2/models/pricing.py @@ -21,7 +21,7 @@ class PricingInfo(BaseModel): "pricing_plan_id": 1, "pricing_unit_id": 1, "pricing_unit_cost_id": 1, - "pricing_unit_cost": Decimal(10), # type: ignore + "pricing_unit_cost": Decimal(10), # type: ignore[dict-item] } ] } From ebccb82bdbd8f0a91030ab4d71cd5c88ec4867d8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 13:48:16 +0100 Subject: [PATCH 088/121] revert example --- .../simcore_service_director_v2/models/comp_runs.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index 2a1bc1e7c9c..a0c152a1364 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -94,8 +94,8 @@ def convert_null_to_empty_metadata(cls, v): "cluster_id": 0, "iteration": 42, "result": "NOT_STARTED", - "created": "2021-03-01T12:07:34.19161", - "modified": "2021-03-01T13:07:34.19161", + "created": "2021-03-01 13:07:34.19161", + "modified": "2021-03-01 13:07:34.19161", "use_on_demand_clusters": False, }, { @@ -105,10 +105,10 @@ def convert_null_to_empty_metadata(cls, v): "cluster_id": 123, "iteration": 12, "result": "SUCCESS", - "created": "2021-03-01T13:07:34.19161", - "modified": "2021-03-01T14:07:34.19161", - "started": 
"2021-03-01T08:07:34.19161", - "ended": "2021-03-01T13:07:34.10", + "created": "2021-03-01 13:07:34.19161", + "modified": "2021-03-0 13:07:34.19161", + "started": "2021-03-01 8:07:34.19161", + "ended": "2021-03-01 13:07:34.10", "metadata": { "node_id_names_map": {}, "product_name": "osparc", From 085866450ffc70d7159f18ee63118b4527617d36 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 13:51:17 +0100 Subject: [PATCH 089/121] revert date --- .../src/simcore_service_director_v2/models/comp_tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 61b9e78878a..3cee1ba22ad 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -233,8 +233,8 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: "state": "NOT_STARTED", "progress": 0.44, "last_heartbeat": None, - "created": "2022-05-20T13:28:31.139", - "modified": "2023-06-23T15:58:32.83308", + "created": "2022-05-20 13:28:31.139+00", + "modified": "2023-06-23 15:58:32.833081+00", "pricing_info": { "pricing_plan_id": 1, "pricing_unit_id": 1, From c3015df75e687b0efd15bc708abfcc15287b575c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 14:13:37 +0100 Subject: [PATCH 090/121] Update services/director-v2/src/simcore_service_director_v2/core/application.py Co-authored-by: Sylvain <35365065+sanderegg@users.noreply.github.com> --- .../src/simcore_service_director_v2/core/application.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 618015d6ab1..d611351f782 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -128,7 +128,7 @@ def create_base_app(settings: AppSettings | None = None) -> FastAPI: logging.getLogger(name).setLevel(quiet_level) app = FastAPI( - debug=settings.SC_BOOT_MODE == BootMode.DEVELOPMENT, + debug=settings.SC_BOOT_MODE is BootMode.DEVELOPMENT, title=PROJECT_NAME, description=SUMMARY, version=API_VERSION, From 337db2d71a22c57e79f826ec1a57ba4ccb283b56 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 14:22:59 +0100 Subject: [PATCH 091/121] fix datetime --- .../src/simcore_service_director_v2/utils/computations.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index 5ec5ae25980..93e3197c71b 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -2,6 +2,7 @@ import logging from typing import Any +import arrow from models_library.projects_state import RunningState from models_library.services import ServiceKeyVersion from models_library.services_regex import SERVICE_KEY_RE @@ -131,7 +132,7 @@ async def find_deprecated_tasks( def _is_service_deprecated(service: dict[str, Any]) -> bool: if deprecation_date := service.get("deprecated"): - deprecation_date = dt.datetime.fromisoformat(deprecation_date).replace( + deprecation_date = arrow.get(deprecation_date).datetime.replace( tzinfo=dt.UTC ) is_deprecated: bool = today > deprecation_date From 5fe085867264fe483ede05e6d974b2f1c22db3b0 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 14:26:38 +0100 Subject: [PATCH 092/121] null fields --- .../api_schemas_directorv2/clusters.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 
deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py index fb62de2e548..9af10c45e59 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py @@ -168,16 +168,15 @@ def set_default_thumbnail_if_empty(cls, v, info: ValidationInfo): class ClusterPatch(BaseCluster): - name: str | None = Field(default=None) # type: ignore[assignment] - description: str | None = Field(default=None) - type: ClusterTypeInModel | None = Field(default=None) # type: ignore[assignment] - owner: GroupID | None = Field(default=None) # type: ignore[assignment] - thumbnail: HttpUrl | None = Field(default=None) - endpoint: AnyUrl | None = Field(default=None) # type: ignore[assignment] - authentication: ExternalClusterAuthentication | None = Field(default=None) # type: ignore[assignment] + name: str | None = None # type: ignore[assignment] + description: str | None = None + type: ClusterTypeInModel | None = None # type: ignore[assignment] + owner: GroupID | None = None # type: ignore[assignment] + thumbnail: HttpUrl | None = None + endpoint: AnyUrl | None = None # type: ignore[assignment] + authentication: ExternalClusterAuthentication | None = None # type: ignore[assignment] access_rights: dict[GroupID, ClusterAccessRights] | None = Field( # type: ignore[assignment] - default=None, - alias="accessRights" + default=None, alias="accessRights" ) model_config = ConfigDict( From 31d1fff840aae508b4cb30879f89982b0904e04a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 14:30:42 +0100 Subject: [PATCH 093/121] revert --- api/specs/director/schemas/scripts/create_node-meta-schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/specs/director/schemas/scripts/create_node-meta-schema.py 
b/api/specs/director/schemas/scripts/create_node-meta-schema.py index 61bb3e7c0f6..29b4a02a9b2 100644 --- a/api/specs/director/schemas/scripts/create_node-meta-schema.py +++ b/api/specs/director/schemas/scripts/create_node-meta-schema.py @@ -15,7 +15,7 @@ if __name__ == "__main__": with Path.open(CURRENT_DIR.parent / "node-meta-v0.0.1-pydantic.json", "w") as f: - schema = ServiceMetaDataPublished.model_json_schema() + schema = ServiceMetaDataPublished.schema_json() schema_without_ref = jsonref.loads(schema) json.dump(schema_without_ref, f, indent=2) From fa334654edd4dcfa29a41b5bbaebd9e31cf9eea6 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 14:33:11 +0100 Subject: [PATCH 094/121] fix unuseful alias --- .../src/simcore_service_director_v2/core/settings.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index 49389832d18..062e1ec4d5f 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -167,9 +167,7 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings): DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None = Field(default=None) # extras - SWARM_STACK_NAME: str = Field( - "undefined-please-check", validation_alias="SWARM_STACK_NAME" - ) + SWARM_STACK_NAME: str = Field(default="undefined-please-check") SERVICE_TRACKING_HEARTBEAT: datetime.timedelta = Field( default=DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL, description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)" From 5b3685b14f72208e78266c03d8d153d9bc0e5a6e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 14:35:08 +0100 Subject: [PATCH 095/121] revert datetime --- .../src/simcore_service_director_v2/models/comp_tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 3cee1ba22ad..d502a8bf825 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -228,7 +228,7 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: } }, "image": image_example, - "submit": "2021-03-01T13:07:34.19161", + "submit": "2021-03-01 13:07:34.19161", "node_class": "INTERACTIVE", "state": "NOT_STARTED", "progress": 0.44, From 0b80e72465ac4b705876792332ec132c449698c0 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 14:53:44 +0100 Subject: [PATCH 096/121] revert required field --- .../src/models_library/api_schemas_directorv2/clusters.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py index 9af10c45e59..28d4fe761c2 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py @@ -125,7 +125,6 @@ class ClusterCreate(BaseCluster): "name": "My awesome cluster", "type": ClusterTypeInModel.ON_PREMISE, "endpoint": "https://registry.osparc-development.fake.dev", - "owner": None, "authentication": { "type": "simple", "username": "someuser", From d3c26801ff5127c7cf0224f08ca147829bdbc5a9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 15:43:41 +0100 Subject: [PATCH 097/121] revert bump-pydantic --- .../src/simcore_service_director_v2/models/comp_runs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py 
b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index a0c152a1364..08c0ce6d948 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -39,7 +39,7 @@ class CompRunsAtDB(BaseModel): run_id: PositiveInt project_uuid: ProjectID user_id: UserID - cluster_id: ClusterID | None = None + cluster_id: ClusterID | None iteration: PositiveInt result: RunningState created: datetime.datetime From b86e5166f088aedb7de8836461e922ecb317cd7c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 15:55:15 +0100 Subject: [PATCH 098/121] fix validator --- .../models-library/src/models_library/clusters.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/packages/models-library/src/models_library/clusters.py b/packages/models-library/src/models_library/clusters.py index 60e74b76b42..5b2a09ac5ed 100644 --- a/packages/models-library/src/models_library/clusters.py +++ b/packages/models-library/src/models_library/clusters.py @@ -1,6 +1,6 @@ from enum import auto from pathlib import Path -from typing import Final, Literal, TypeAlias +from typing import Final, Literal, Self, TypeAlias from pydantic import ( AnyUrl, @@ -222,13 +222,12 @@ class Cluster(BaseCluster): ) @model_validator(mode="after") - @classmethod - def _check_owner_has_access_rights(cls, values): - is_default_cluster = bool(values.id == DEFAULT_CLUSTER_ID) - owner_gid = values.owner + def _check_owner_has_access_rights(self) -> Self: + is_default_cluster = bool(self.id == DEFAULT_CLUSTER_ID) + owner_gid = self.owner # check owner is in the access rights, if not add it - access_rights = values.access_rights or {} + access_rights = self.access_rights or {} if owner_gid not in access_rights: access_rights[owner_gid] = ( CLUSTER_USER_RIGHTS if is_default_cluster else CLUSTER_ADMIN_RIGHTS @@ -239,5 +238,5 @@ def _check_owner_has_access_rights(cls, 
values): ): msg = f"the cluster owner access rights are incorrectly set: {access_rights[owner_gid]}" raise ValueError(msg) - values.access_rights = access_rights - return values + self.access_rights = access_rights + return self From e73acecc5b182bc207bfc1d97460927752cf54f3 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 16:26:53 +0100 Subject: [PATCH 099/121] add comment --- packages/models-library/src/models_library/errors.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/errors.py b/packages/models-library/src/models_library/errors.py index e33ec782d8d..26b4aa0d91d 100644 --- a/packages/models-library/src/models_library/errors.py +++ b/packages/models-library/src/models_library/errors.py @@ -1,6 +1,8 @@ from typing import Any -from typing_extensions import TypedDict +from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict + TypedDict, +) Loc = tuple[int | str, ...] 
From 18a619e8d4fa9d12c2408155fc0d88837476d935 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 16:31:52 +0100 Subject: [PATCH 100/121] remove unused code --- .../src/simcore_service_director_v2/core/errors.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/errors.py b/services/director-v2/src/simcore_service_director_v2/core/errors.py index d5b9f0d2183..213cd4744bf 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/core/errors.py @@ -142,6 +142,7 @@ def __init__(self, pipeline_id: str, msg: str | None = None): class TaskSchedulingError(SchedulerError): """A task cannot be scheduled""" + code: str = "task scheduler error" def __init__(self, project_id: ProjectID, node_id: NodeID, msg: str | None = None): @@ -224,12 +225,10 @@ def get_errors(self) -> list[ErrorDict]: class ComputationalSchedulerChangedError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.scheduler_changed" # type: ignore msg_template = "The dask scheduler ID changed from '{original_scheduler_id}' to '{current_scheduler_id}'" class ComputationalBackendNotConnectedError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.not_connected" # type: ignore msg_template = "The dask computational backend is not connected" @@ -238,24 +237,20 @@ class ComputationalBackendNoS3AccessError(OsparcErrorMixin, SchedulerError): class ComputationalBackendTaskNotFoundError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.task_not_found" # type: ignore msg_template = ( "The dask computational backend does not know about the task '{job_id}'" ) class ComputationalBackendTaskResultsNotReadyError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.task_result_not_ready" # type: ignore msg_template = "The task result is not ready yet for job '{job_id}'" class 
ClustersKeeperNotAvailableError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.clusters_keeper_not_available" # type: ignore msg_template = "clusters-keeper service is not available!" class ComputationalBackendOnDemandNotReadyError(OsparcErrorMixin, SchedulerError): - code = "computational_backend.on_demand_cluster.not_ready" # type: ignore msg_template = ( "The on demand computational cluster is not ready 'est. remaining time: {eta}'" ) @@ -265,7 +260,6 @@ class ComputationalBackendOnDemandNotReadyError(OsparcErrorMixin, SchedulerError # SCHEDULER/CLUSTER ERRORS # class ClusterNotFoundError(OsparcErrorMixin, SchedulerError): - code = "cluster.not_found" # type: ignore msg_template = "The cluster '{cluster_id}' not found" @@ -283,24 +277,20 @@ class ClusterInvalidOperationError(OsparcErrorMixin, SchedulerError): class DaskClientRequestError(OsparcErrorMixin, SchedulerError): - code = "dask_client.request.error" # type: ignore msg_template = ( "The dask client to cluster on '{endpoint}' did an invalid request '{error}'" ) class DaskClusterError(OsparcErrorMixin, SchedulerError): - code = "cluster.error" # type: ignore msg_template = "The dask cluster on '{endpoint}' encountered an error: '{error}'" class DaskGatewayServerError(OsparcErrorMixin, SchedulerError): - code = "gateway.error" # type: ignore msg_template = "The dask gateway on '{endpoint}' encountered an error: '{error}'" class DaskClientAcquisisitonError(OsparcErrorMixin, SchedulerError): - code = "dask_client.acquisition.error" # type: ignore msg_template = ( "The dask client to cluster '{cluster}' encountered an error '{error}'" ) From f31ba3792a9f42f4f9b21c161f8ff8dfa8fcdfc0 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Tue, 12 Nov 2024 22:07:25 +0100 Subject: [PATCH 101/121] fix mypy --- .../src/simcore_service_director_v2/cli/_core.py | 6 ++++-- .../models/dynamic_services_scheduler.py | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git 
a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py index 945cb790e39..d5f2de11dbf 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py @@ -52,14 +52,16 @@ async def _initialized_app(only_db: bool = False) -> AsyncIterator[FastAPI]: ### PROJECT SAVE STATE -def _get_dynamic_sidecar_endpoint(settings: AppSettings, node_id: NodeIDStr) -> str: +def _get_dynamic_sidecar_endpoint( + settings: AppSettings, node_id: NodeIDStr +) -> AnyHttpUrl: dynamic_sidecar_names = DynamicSidecarNamesHelper.make(NodeID(node_id)) hostname = dynamic_sidecar_names.service_name_dynamic_sidecar port = settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_PORT url = AnyHttpUrl.build( # pylint: disable=no-member scheme="http", host=hostname, port=port ) - return f"{url}" + return url async def _save_node_state( diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index f9f0133c586..0a2322c11c1 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -389,12 +389,12 @@ class SchedulerData(CommonServiceDetails, DynamicSidecarServiceLabels): port: PortInt = Field(default=8000, description="dynamic-sidecar port") @property - def endpoint(self) -> str: + def endpoint(self) -> AnyHttpUrl: """endpoint where all the services are exposed""" url = AnyHttpUrl.build( # pylint: disable=no-member scheme="http", host=self.hostname, port=self.port ) - return f"{url}" + return url dynamic_sidecar: DynamicSidecar = Field( ..., From 0679046a468e7b28fc127ced2f926b57b0bcc7c5 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 
07:58:15 +0100 Subject: [PATCH 102/121] revert --- services/director-v2/tests/mocks/fake_task.json | 1 - 1 file changed, 1 deletion(-) diff --git a/services/director-v2/tests/mocks/fake_task.json b/services/director-v2/tests/mocks/fake_task.json index eb8cc71db76..b26ebfa9ba5 100644 --- a/services/director-v2/tests/mocks/fake_task.json +++ b/services/director-v2/tests/mocks/fake_task.json @@ -61,7 +61,6 @@ "created": "1961-07-06T11:24:30.877Z", "modified": "2008-03-24T07:02:09.279Z", "last_heartbeat": null, - "pricing_info": null, "hardware_info": { "aws_ec2_instances": [] } From 149b7b5d085c7132b7e228cd53f7017a44d1b44c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 08:54:58 +0100 Subject: [PATCH 103/121] fix datetime --- .../src/common_library/pydantic_validators.py | 27 ++++++++++++++----- .../models/comp_runs.py | 4 +-- .../models/comp_tasks.py | 24 +++++++++-------- 3 files changed, 35 insertions(+), 20 deletions(-) diff --git a/packages/common-library/src/common_library/pydantic_validators.py b/packages/common-library/src/common_library/pydantic_validators.py index 471ba1a4bf3..c30413e3622 100644 --- a/packages/common-library/src/common_library/pydantic_validators.py +++ b/packages/common-library/src/common_library/pydantic_validators.py @@ -1,12 +1,25 @@ -import datetime +import datetime as dt import re import warnings -from datetime import timedelta from pydantic import TypeAdapter, field_validator -def _validate_legacy_timedelta_str(time_str: str | timedelta) -> str | timedelta: +def validate_legacy_datetime_str(v: str | dt.datetime) -> dt.datetime: + if isinstance(v, dt.datetime): + return v + try: + return dt.datetime.fromisoformat(v) + except ValueError: + pass + + try: + return dt.datetime.strptime(v, "%Y-%m-%d %H:%M:%S.%f%z") + except ValueError: + raise ValueError("Timestamp must be in a recognized datetime format") + + +def _validate_legacy_timedelta_str(time_str: str | dt.timedelta) -> str | dt.timedelta: if not 
isinstance(time_str, str): return time_str @@ -34,14 +47,14 @@ def validate_numeric_string_as_timedelta(field: str): """Transforms a float/int number into a valid datetime as it used to work in the past""" def _numeric_string_as_timedelta( - v: datetime.timedelta | str | float, - ) -> datetime.timedelta | str | float: + v: dt.timedelta | str | float, + ) -> dt.timedelta | str | float: if isinstance(v, str): try: converted_value = float(v) - iso8601_format = TypeAdapter(timedelta).dump_python( - timedelta(seconds=converted_value), mode="json" + iso8601_format = TypeAdapter(dt.timedelta).dump_python( + dt.timedelta(seconds=converted_value), mode="json" ) warnings.warn( f"{field}='{v}' -should be set to-> {field}='{iso8601_format}' (ISO8601 datetime format). " diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index 08c0ce6d948..9b466bbc49f 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -106,8 +106,8 @@ def convert_null_to_empty_metadata(cls, v): "iteration": 12, "result": "SUCCESS", "created": "2021-03-01 13:07:34.19161", - "modified": "2021-03-0 13:07:34.19161", - "started": "2021-03-01 8:07:34.19161", + "modified": "2021-03-01 13:07:34.19161", + "started": "2021-03-01 08:07:34.19161", "ended": "2021-03-01 13:07:34.10", "metadata": { "node_id_names_map": {}, diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index d502a8bf825..e5213de8f8e 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -1,7 +1,8 @@ -import datetime +import datetime as dt from contextlib import suppress from typing import Annotated, Any +from 
common_library.pydantic_validators import validate_legacy_datetime_str from dask_task_models_library.container_tasks.protocol import ContainerEnvsDict from models_library.api_schemas_directorv2.services import NodeRequirements from models_library.basic_regex import SIMPLE_VERSION_RE @@ -16,6 +17,7 @@ from models_library.services_resources import BootMode from pydantic import ( BaseModel, + BeforeValidator, ByteSize, ConfigDict, Field, @@ -128,25 +130,25 @@ class CompTaskAtDB(BaseModel): description="the hex digest of the resolved inputs +outputs hash at the time when the last outputs were generated", ) image: Image - submit: datetime.datetime - start: datetime.datetime | None = Field(default=None) - end: datetime.datetime | None = Field(default=None) + submit: dt.datetime + start: dt.datetime | None = None + end: dt.datetime | None = None state: RunningState - task_id: PositiveInt | None = Field(default=None) + task_id: PositiveInt | None = None internal_id: PositiveInt node_class: NodeClass - errors: list[ErrorDict] | None = Field(default=None) + errors: list[ErrorDict] | None = None progress: float | None = Field( default=None, ge=0.0, le=1.0, description="current progress of the task if available", ) - last_heartbeat: datetime.datetime | None = Field( + last_heartbeat: dt.datetime | None = Field( ..., description="Last time the running task was checked by the backend" ) - created: datetime.datetime - modified: datetime.datetime + created: Annotated[dt.datetime, BeforeValidator(validate_legacy_datetime_str)] + modified: Annotated[dt.datetime, BeforeValidator(validate_legacy_datetime_str)] # Additional information about price and hardware (ex. 
AWS EC2 instance type) pricing_info: dict | None hardware_info: HardwareInfo @@ -165,9 +167,9 @@ def _convert_state_from_state_type_enum_if_needed(cls, v): @field_validator("start", "end", "submit") @classmethod - def _ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: + def _ensure_utc(cls, v: dt.datetime | None) -> dt.datetime | None: if v is not None and v.tzinfo is None: - v = v.replace(tzinfo=datetime.UTC) + v = v.replace(tzinfo=dt.UTC) return v @field_validator("hardware_info", mode="before") From 1d197e57d03bb50cfdd5600f718a9218d1a1408e Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 09:09:47 +0100 Subject: [PATCH 104/121] fix nullable --- .../src/simcore_service_director_v2/models/comp_tasks.py | 2 +- services/director-v2/tests/unit/test_utils_computation.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index e5213de8f8e..00d81534702 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -150,7 +150,7 @@ class CompTaskAtDB(BaseModel): created: Annotated[dt.datetime, BeforeValidator(validate_legacy_datetime_str)] modified: Annotated[dt.datetime, BeforeValidator(validate_legacy_datetime_str)] # Additional information about price and hardware (ex. 
AWS EC2 instance type) - pricing_info: dict | None + pricing_info: dict | None = None hardware_info: HardwareInfo @field_validator("state", mode="before") diff --git a/services/director-v2/tests/unit/test_utils_computation.py b/services/director-v2/tests/unit/test_utils_computation.py index 184a65d0db7..14c9ffa34f3 100644 --- a/services/director-v2/tests/unit/test_utils_computation.py +++ b/services/director-v2/tests/unit/test_utils_computation.py @@ -27,7 +27,7 @@ def fake_task_file(mocks_dir: Path): @pytest.fixture(scope="session") def fake_task(fake_task_file: Path) -> CompTaskAtDB: - return CompTaskAtDB.parse_file(fake_task_file) + return CompTaskAtDB.model_validate_json(fake_task_file.read_text()) # NOTE: these parametrizations are made to mimic something like a sleepers project From 2ce21df613590862cf704ab7b578ecc86dc2a3d9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 09:29:26 +0100 Subject: [PATCH 105/121] fix mock --- .../src/simcore_service_director_v2/models/comp_tasks.py | 2 +- services/director-v2/tests/mocks/fake_task.json | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 00d81534702..e5213de8f8e 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -150,7 +150,7 @@ class CompTaskAtDB(BaseModel): created: Annotated[dt.datetime, BeforeValidator(validate_legacy_datetime_str)] modified: Annotated[dt.datetime, BeforeValidator(validate_legacy_datetime_str)] # Additional information about price and hardware (ex. 
AWS EC2 instance type) - pricing_info: dict | None = None + pricing_info: dict | None hardware_info: HardwareInfo @field_validator("state", mode="before") diff --git a/services/director-v2/tests/mocks/fake_task.json b/services/director-v2/tests/mocks/fake_task.json index b26ebfa9ba5..00a9dfe3501 100644 --- a/services/director-v2/tests/mocks/fake_task.json +++ b/services/director-v2/tests/mocks/fake_task.json @@ -60,6 +60,7 @@ "end": "2008-03-24T07:02:09.279Z", "created": "1961-07-06T11:24:30.877Z", "modified": "2008-03-24T07:02:09.279Z", + "pricing_info": null, "last_heartbeat": null, "hardware_info": { "aws_ec2_instances": [] From fd03fbc2c2a5519a4bbf894dfc385342ea110b6a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 09:42:48 +0100 Subject: [PATCH 106/121] fix serialization --- .../src/simcore_service_director_v2/models/comp_tasks.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index e5213de8f8e..103931ab916 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -21,7 +21,6 @@ ByteSize, ConfigDict, Field, - PlainSerializer, PositiveInt, TypeAdapter, ValidationInfo, @@ -118,7 +117,7 @@ class NodeSchema(BaseModel): class CompTaskAtDB(BaseModel): project_id: ProjectID - node_id: Annotated[NodeID, PlainSerializer(str, return_type=str)] + node_id: NodeID job_id: str | None = Field(default=None, description="The worker job ID") node_schema: NodeSchema = Field(..., alias="schema") inputs: InputsDict | None = Field(..., description="the inputs payload") @@ -181,7 +180,7 @@ def _backward_compatible_null_value(cls, v: HardwareInfo | None) -> HardwareInfo def to_db_model(self, **exclusion_rules) -> dict[str, Any]: comp_task_dict = self.model_dump( - by_alias=True, 
exclude_unset=True, **exclusion_rules + mode="json", by_alias=True, exclude_unset=True, **exclusion_rules ) if "state" in comp_task_dict: comp_task_dict["state"] = RUNNING_STATE_TO_DB[comp_task_dict["state"]].value From c5a66a91929d4da1d94f2945cdc003f8f4ce040a Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 10:54:56 +0100 Subject: [PATCH 107/121] revert validator --- .../src/models_library/clusters.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/packages/models-library/src/models_library/clusters.py b/packages/models-library/src/models_library/clusters.py index d27ce710f27..243144600e9 100644 --- a/packages/models-library/src/models_library/clusters.py +++ b/packages/models-library/src/models_library/clusters.py @@ -1,6 +1,6 @@ from enum import auto from pathlib import Path -from typing import Final, Literal, Self, TypeAlias +from typing import Final, Literal, TypeAlias from pydantic import ( AnyUrl, @@ -221,13 +221,14 @@ class Cluster(BaseCluster): }, ) - @model_validator(mode="after") - def _check_owner_has_access_rights(self) -> Self: - is_default_cluster = bool(self.id == DEFAULT_CLUSTER_ID) - owner_gid = self.owner + @model_validator(mode="before") + @classmethod + def check_owner_has_access_rights(cls, values): + is_default_cluster = bool(values["id"] == DEFAULT_CLUSTER_ID) + owner_gid = values["owner"] # check owner is in the access rights, if not add it - access_rights = self.access_rights or {} + access_rights = values.get("access_rights", values.get("accessRights", {})) if owner_gid not in access_rights: access_rights[owner_gid] = ( CLUSTER_USER_RIGHTS if is_default_cluster else CLUSTER_ADMIN_RIGHTS @@ -238,5 +239,5 @@ def _check_owner_has_access_rights(self) -> Self: ): msg = f"the cluster owner access rights are incorrectly set: {access_rights[owner_gid]}" raise ValueError(msg) - self.access_rights = access_rights - return self + values["access_rights"] = access_rights + return values From 
be1e296ca85b843d2f495d95fba28c828664e4b8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 15:42:04 +0100 Subject: [PATCH 108/121] fix type --- .../src/models_library/service_settings_labels.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index 7c14abf65b0..0a1c36342fb 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -276,7 +276,7 @@ class DynamicSidecarServiceLabels(BaseModel): ), ) - compose_spec: Json[ComposeSpecLabelDict | None] | None = Field( + compose_spec: Json[ComposeSpecLabelDict] | None = Field( None, alias="simcore.service.compose-spec", description=( From f3ebf75e9cb47228c12ea5dfa15cf09b3253a426 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 15:53:53 +0100 Subject: [PATCH 109/121] fix test --- services/director-v2/tests/mocks/fake_scheduler_data.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/tests/mocks/fake_scheduler_data.json b/services/director-v2/tests/mocks/fake_scheduler_data.json index 0f7b03e3369..8a7c110938b 100644 --- a/services/director-v2/tests/mocks/fake_scheduler_data.json +++ b/services/director-v2/tests/mocks/fake_scheduler_data.json @@ -7,7 +7,7 @@ ], "state_exclude": null }, - "compose_spec": "null", + "compose_spec": null, "container_http_entry": null, "restart_policy": "no-restart", "key": "simcore/services/dynamic/jupyter-math", From 0bacc3b93b7b9c4f6102b718be6182b61b7831d7 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 16:05:55 +0100 Subject: [PATCH 110/121] compose_spec nullable --- services/director-v2/tests/mocks/fake_scheduler_data.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/services/director-v2/tests/mocks/fake_scheduler_data.json b/services/director-v2/tests/mocks/fake_scheduler_data.json index 8a7c110938b..0f7b03e3369 100644 --- a/services/director-v2/tests/mocks/fake_scheduler_data.json +++ b/services/director-v2/tests/mocks/fake_scheduler_data.json @@ -7,7 +7,7 @@ ], "state_exclude": null }, - "compose_spec": null, + "compose_spec": "null", "container_http_entry": null, "restart_policy": "no-restart", "key": "simcore/services/dynamic/jupyter-math", From 6e0dc86f905e4a15abf617ad40315fe396e155e4 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Wed, 13 Nov 2024 16:11:44 +0100 Subject: [PATCH 111/121] fix nullable --- .../src/models_library/service_settings_labels.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index 0a1c36342fb..7c14abf65b0 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -276,7 +276,7 @@ class DynamicSidecarServiceLabels(BaseModel): ), ) - compose_spec: Json[ComposeSpecLabelDict] | None = Field( + compose_spec: Json[ComposeSpecLabelDict | None] | None = Field( None, alias="simcore.service.compose-spec", description=( From 5e691840bb96e180937b9c13e33fd37f4569c6ad Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 10:04:51 +0100 Subject: [PATCH 112/121] fix cluster serialization --- .../models-library/src/models_library/clusters.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/clusters.py b/packages/models-library/src/models_library/clusters.py index 243144600e9..8b38913c246 100644 --- a/packages/models-library/src/models_library/clusters.py +++ b/packages/models-library/src/models_library/clusters.py @@ -2,6 +2,7 @@ from pathlib import Path from 
typing import Final, Literal, TypeAlias +from models_library.utils._original_fastapi_encoders import jsonable_encoder from pydantic import ( AnyUrl, BaseModel, @@ -224,6 +225,8 @@ class Cluster(BaseCluster): @model_validator(mode="before") @classmethod def check_owner_has_access_rights(cls, values): + values = jsonable_encoder(values) + is_default_cluster = bool(values["id"] == DEFAULT_CLUSTER_ID) owner_gid = values["owner"] @@ -231,11 +234,15 @@ def check_owner_has_access_rights(cls, values): access_rights = values.get("access_rights", values.get("accessRights", {})) if owner_gid not in access_rights: access_rights[owner_gid] = ( - CLUSTER_USER_RIGHTS if is_default_cluster else CLUSTER_ADMIN_RIGHTS + CLUSTER_USER_RIGHTS.model_dump() + if is_default_cluster + else CLUSTER_ADMIN_RIGHTS.model_dump() ) # check owner has the expected access if access_rights[owner_gid] != ( - CLUSTER_USER_RIGHTS if is_default_cluster else CLUSTER_ADMIN_RIGHTS + CLUSTER_USER_RIGHTS.model_dump() + if is_default_cluster + else CLUSTER_ADMIN_RIGHTS.model_dump() ): msg = f"the cluster owner access rights are incorrectly set: {access_rights[owner_gid]}" raise ValueError(msg) From 374619682b51260b76af5140767bc9ac3d4d3516 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 10:25:05 +0100 Subject: [PATCH 113/121] fix datetime --- .../src/common_library/pydantic_validators.py | 14 -------------- .../models/comp_tasks.py | 12 +++++------- 2 files changed, 5 insertions(+), 21 deletions(-) diff --git a/packages/common-library/src/common_library/pydantic_validators.py b/packages/common-library/src/common_library/pydantic_validators.py index c30413e3622..a0122fccbe8 100644 --- a/packages/common-library/src/common_library/pydantic_validators.py +++ b/packages/common-library/src/common_library/pydantic_validators.py @@ -5,20 +5,6 @@ from pydantic import TypeAdapter, field_validator -def validate_legacy_datetime_str(v: str | dt.datetime) -> dt.datetime: - if isinstance(v, dt.datetime): 
- return v - try: - return dt.datetime.fromisoformat(v) - except ValueError: - pass - - try: - return dt.datetime.strptime(v, "%Y-%m-%d %H:%M:%S.%f%z") - except ValueError: - raise ValueError("Timestamp must be in a recognized datetime format") - - def _validate_legacy_timedelta_str(time_str: str | dt.timedelta) -> str | dt.timedelta: if not isinstance(time_str, str): return time_str diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 103931ab916..e45e7aea896 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -1,8 +1,7 @@ import datetime as dt from contextlib import suppress -from typing import Annotated, Any +from typing import Any -from common_library.pydantic_validators import validate_legacy_datetime_str from dask_task_models_library.container_tasks.protocol import ContainerEnvsDict from models_library.api_schemas_directorv2.services import NodeRequirements from models_library.basic_regex import SIMPLE_VERSION_RE @@ -17,7 +16,6 @@ from models_library.services_resources import BootMode from pydantic import ( BaseModel, - BeforeValidator, ByteSize, ConfigDict, Field, @@ -146,8 +144,8 @@ class CompTaskAtDB(BaseModel): last_heartbeat: dt.datetime | None = Field( ..., description="Last time the running task was checked by the backend" ) - created: Annotated[dt.datetime, BeforeValidator(validate_legacy_datetime_str)] - modified: Annotated[dt.datetime, BeforeValidator(validate_legacy_datetime_str)] + created: dt.datetime + modified: dt.datetime # Additional information about price and hardware (ex. 
AWS EC2 instance type) pricing_info: dict | None hardware_info: HardwareInfo @@ -234,8 +232,8 @@ def to_db_model(self, **exclusion_rules) -> dict[str, Any]: "state": "NOT_STARTED", "progress": 0.44, "last_heartbeat": None, - "created": "2022-05-20 13:28:31.139+00", - "modified": "2023-06-23 15:58:32.833081+00", + "created": "2022-05-20 13:28:31.139", + "modified": "2023-06-23 15:58:32.833081", "pricing_info": { "pricing_plan_id": 1, "pricing_unit_id": 1, From 6057dd4571ce8946c6711bcf43bf8e27ad2e0729 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 10:51:10 +0100 Subject: [PATCH 114/121] fix url --- packages/settings-library/src/settings_library/s3.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/settings-library/src/settings_library/s3.py b/packages/settings-library/src/settings_library/s3.py index 95268b41920..18f23860658 100644 --- a/packages/settings-library/src/settings_library/s3.py +++ b/packages/settings-library/src/settings_library/s3.py @@ -1,20 +1,16 @@ -from typing import Annotated - -from pydantic import AnyHttpUrl, BeforeValidator, Field, TypeAdapter +from pydantic import AnyHttpUrl, Field from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings from .basic_types import IDStr -ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) - class S3Settings(BaseCustomSettings): S3_ACCESS_KEY: IDStr S3_BUCKET_NAME: IDStr - S3_ENDPOINT: Annotated[ - str, BeforeValidator(lambda x: str(ANY_HTTP_URL_ADAPTER.validate_python(x))) - ] | None = Field(default=None, description="do not define if using standard AWS") + S3_ENDPOINT: AnyHttpUrl | None = Field( + default=None, description="do not define if using standard AWS" + ) S3_REGION: IDStr S3_SECRET_KEY: IDStr From e0f8b084a14b3f363deb15be982c321ba73fc802 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 11:01:11 +0100 Subject: [PATCH 115/121] fix url --- 
.../modules/dynamic_sidecar/docker_service_specs/sidecar.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index 13f13ad28c5..f344c93422f 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -199,7 +199,7 @@ def _get_environment_variables( "NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS": f"{app_settings.DIRECTOR_V2_NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS}", } if r_clone_settings.R_CLONE_S3.S3_ENDPOINT is not None: - envs["S3_ENDPOINT"] = r_clone_settings.R_CLONE_S3.S3_ENDPOINT + envs["S3_ENDPOINT"] = f"{r_clone_settings.R_CLONE_S3.S3_ENDPOINT}" return envs From 0f2e38e04799864e311bf1e285698b7a958c953c Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 11:05:28 +0100 Subject: [PATCH 116/121] revert --- .../src/simcore_service_director_v2/cli/_core.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py index d5f2de11dbf..028a882f268 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py @@ -12,7 +12,7 @@ from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceType from models_library.services_enums import ServiceBootType, ServiceState -from pydantic import AnyHttpUrl, BaseModel, PositiveInt +from pydantic import AnyHttpUrl, BaseModel, PositiveInt, TypeAdapter from rich.live import Live from rich.table import Table from 
servicelib.services_utils import get_service_from_key @@ -58,8 +58,8 @@ def _get_dynamic_sidecar_endpoint( dynamic_sidecar_names = DynamicSidecarNamesHelper.make(NodeID(node_id)) hostname = dynamic_sidecar_names.service_name_dynamic_sidecar port = settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_PORT - url = AnyHttpUrl.build( # pylint: disable=no-member - scheme="http", host=hostname, port=port + url: AnyHttpUrl = TypeAdapter(AnyHttpUrl).validate_python( + f"http://{hostname}:{port}" ) return url From fcf9631bde5c86801464e8168cf36878f8417355 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 11:17:38 +0100 Subject: [PATCH 117/121] fix json --- services/director-v2/tests/unit/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index 2856dffb5fe..2123a506ad7 100644 --- a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -27,6 +27,7 @@ from models_library.service_settings_labels import SimcoreServiceLabels from models_library.services import RunID, ServiceKey, ServiceKeyVersion, ServiceVersion from models_library.services_enums import ServiceState +from models_library.utils._original_fastapi_encoders import jsonable_encoder from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -200,7 +201,7 @@ def mocked_storage_service_api( respx_mock.post( "/simcore-s3:access", name="get_or_create_temporary_s3_access", - ).respond(json={"data": fake_s3_settings.model_dump(by_alias=True)}) + ).respond(json=jsonable_encoder({"data": fake_s3_settings}, by_alias=True)) yield respx_mock From a10e5b835045a985b051090f5a01c45bdd6bcfc9 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 12:00:51 +0100 Subject: [PATCH 118/121] fix url --- .../dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py 
| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py index e5b78bd286a..1016cfd5c5c 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/file_utils.py @@ -75,7 +75,7 @@ def _s3fs_settings_from_s3_settings(s3_settings: S3Settings) -> S3FsSettingsDict # setting it for the us-east-1 creates issue when creating buckets (which we do in tests) s3fs_settings["client_kwargs"]["region_name"] = s3_settings.S3_REGION if s3_settings.S3_ENDPOINT is not None: - s3fs_settings["client_kwargs"]["endpoint_url"] = s3_settings.S3_ENDPOINT + s3fs_settings["client_kwargs"]["endpoint_url"] = f"{s3_settings.S3_ENDPOINT}" return s3fs_settings From 6c6d8bd2d1a019cc96edc69238ab85d3e0bd12d8 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 12:19:30 +0100 Subject: [PATCH 119/121] fix url --- packages/pytest-simcore/src/pytest_simcore/minio_service.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/minio_service.py b/packages/pytest-simcore/src/pytest_simcore/minio_service.py index 38b9d2bdf8d..ddd3bbb994f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/minio_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/minio_service.py @@ -4,6 +4,7 @@ import pytest from faker import Faker +from pydantic import AnyHttpUrl, TypeAdapter from pytest_simcore.helpers.docker import get_service_published_port from pytest_simcore.helpers.host import get_localhost_ip from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -20,7 +21,9 @@ def minio_s3_settings( return S3Settings( S3_ACCESS_KEY=testing_environ_vars["S3_ACCESS_KEY"], S3_SECRET_KEY=testing_environ_vars["S3_SECRET_KEY"], - 
S3_ENDPOINT=f"http://{get_localhost_ip()}:{get_service_published_port('minio')}", + S3_ENDPOINT=TypeAdapter(AnyHttpUrl).validate_python( + f"http://{get_localhost_ip()}:{get_service_published_port('minio')}" + ), S3_BUCKET_NAME=testing_environ_vars["S3_BUCKET_NAME"], S3_REGION="us-east-1", ) From 1e9ff2d552a48e0af829b441a220e95997eec909 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 12:29:56 +0100 Subject: [PATCH 120/121] fix url --- packages/aws-library/src/aws_library/s3/_client.py | 2 +- packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py | 4 ++-- .../tests/integration/test_node_ports_common_aws_s3_cli.py | 4 +++- .../tests/integration/test_node_ports_common_r_clone.py | 4 +++- .../tests/integration/test_modules_long_running_tasks.py | 2 +- 5 files changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/aws-library/src/aws_library/s3/_client.py b/packages/aws-library/src/aws_library/s3/_client.py index 38e7c0f9636..5f89a2cee7c 100644 --- a/packages/aws-library/src/aws_library/s3/_client.py +++ b/packages/aws-library/src/aws_library/s3/_client.py @@ -73,7 +73,7 @@ async def create( session = aioboto3.Session() session_client = session.client( "s3", - endpoint_url=settings.S3_ENDPOINT, + endpoint_url=f"{settings.S3_ENDPOINT}", aws_access_key_id=settings.S3_ACCESS_KEY, aws_secret_access_key=settings.S3_SECRET_KEY, region_name=settings.S3_REGION, diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py index e6afeac8e7b..012deb09622 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py @@ -25,14 +25,14 @@ async def s3_client(s3_settings: S3Settings) -> typing.AsyncIterator[S3Client]: exit_stack = contextlib.AsyncExitStack() session_client = session.client( "s3", - endpoint_url=s3_settings.S3_ENDPOINT, + endpoint_url=f"{s3_settings.S3_ENDPOINT}", 
aws_access_key_id=s3_settings.S3_ACCESS_KEY, aws_secret_access_key=s3_settings.S3_SECRET_KEY, region_name=s3_settings.S3_REGION, config=Config(signature_version="s3v4"), ) assert isinstance(session_client, ClientCreatorContext) - client = typing.cast(S3Client, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] + client = typing.cast(S3Client, await exit_stack.enter_async_context(session_client)) # type: ignore[arg-type] yield client diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py index 0c0c03b0363..717a428a1ed 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py @@ -50,7 +50,9 @@ async def cleanup_bucket_after_test( yield - async with session.client("s3", endpoint_url=aws_s3_cli_settings.AWS_S3_CLI_S3.S3_ENDPOINT) as s3_client: # type: ignore + async with session.client( + "s3", endpoint_url=f"{aws_s3_cli_settings.AWS_S3_CLI_S3.S3_ENDPOINT}" + ) as s3_client: # List all object versions paginator = s3_client.get_paginator("list_object_versions") async for page in paginator.paginate( diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py index 5d728aad51d..c94fc524bec 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py @@ -51,7 +51,9 @@ async def cleanup_bucket_after_test( yield - async with session.client("s3", endpoint_url=r_clone_settings.R_CLONE_S3.S3_ENDPOINT) as s3_client: # type: ignore + async with session.client( + "s3", endpoint_url=f"{r_clone_settings.R_CLONE_S3.S3_ENDPOINT}" + ) as s3_client: # List all object versions paginator = s3_client.get_paginator("list_object_versions") 
async for page in paginator.paginate( diff --git a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py index a1b64635e58..cb5631ec20d 100644 --- a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py +++ b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py @@ -313,7 +313,7 @@ async def s3_client(s3_settings: S3Settings) -> AsyncIterable[S3Client]: session = aioboto3.Session() session_client = session.client( "s3", - endpoint_url=s3_settings.S3_ENDPOINT, + endpoint_url=f"{s3_settings.S3_ENDPOINT}", aws_access_key_id=s3_settings.S3_ACCESS_KEY, aws_secret_access_key=s3_settings.S3_SECRET_KEY, region_name=s3_settings.S3_REGION, From 6c9af46b0801a2acd293e827d37e5d974d778793 Mon Sep 17 00:00:00 2001 From: Giancarlo Romeo Date: Thu, 14 Nov 2024 12:38:19 +0100 Subject: [PATCH 121/121] fix env serialization --- packages/pytest-simcore/src/pytest_simcore/minio_service.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/minio_service.py b/packages/pytest-simcore/src/pytest_simcore/minio_service.py index ddd3bbb994f..f8adf8cda9f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/minio_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/minio_service.py @@ -34,5 +34,7 @@ def minio_s3_settings_envs( minio_s3_settings: S3Settings, monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - changed_envs: EnvVarsDict = minio_s3_settings.model_dump(exclude_unset=True) + changed_envs: EnvVarsDict = minio_s3_settings.model_dump( + mode="json", exclude_unset=True + ) return setenvs_from_dict(monkeypatch, changed_envs)