✨Maintenance: enable mypy in storage (#4086)
sanderegg authored Apr 13, 2023
1 parent c6f9674 commit c1846f6
Showing 17 changed files with 256 additions and 170 deletions.
5 changes: 5 additions & 0 deletions .github/workflows/ci-testing-deploy.yml
@@ -384,7 +384,10 @@ jobs:
        run: ./ci/helpers/show_system_versions.bash
      - name: install
        run: ./ci/github/unit-testing/storage.bash install
+     - name: typecheck
+       run: ./ci/github/unit-testing/storage.bash typecheck
      - name: test
+       if: always()
        run: ./ci/github/unit-testing/storage.bash test
      - uses: codecov/codecov-action@v3
with:
@@ -844,6 +847,7 @@ jobs:
      - name: typecheck
        run: ./ci/github/unit-testing/dask-sidecar.bash typecheck
      - name: test
+       if: always()
        run: ./ci/github/unit-testing/dask-sidecar.bash test
      - uses: codecov/codecov-action@v3
with:
@@ -895,6 +899,7 @@ jobs:
          pushd services/osparc-gateway-server && \
          make mypy
      - name: test
+       if: always()
        run: |
          source .venv/bin/activate && \
          pushd services/osparc-gateway-server && \
5 changes: 1 addition & 4 deletions services/storage/src/simcore_service_storage/_meta.py
@@ -14,9 +14,6 @@
api_version: str = __version__
api_vtag: str = f"v{version.major}"

-# legacy
-api_version_prefix: str = api_vtag
-

## https://patorjk.com/software/taag/#p=display&f=Standard&t=Storage
WELCOME_MSG = r"""
@@ -25,7 +22,7 @@
\___ \| __/ _ \| '__/ _` |/ _` |/ _ \
___) | || (_) | | | (_| | (_| | __/
|____/ \__\___/|_| \__,_|\__, |\___|
-|___/ {0}
+|___/ {}
""".format(
f"v{__version__}"
services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter.py
@@ -1,7 +1,7 @@
import asyncio
import logging
from math import ceil
-from typing import Any, Callable, Optional, Union, cast
+from typing import Any, Callable, TypeVar, cast

import aiohttp
from aiohttp import web
@@ -41,10 +41,10 @@ async def _request(
method: str,
path: str,
*,
-    json: Optional[dict[str, Any]] = None,
-    params: Optional[dict[str, Any]] = None,
+    json: dict[str, Any] | None = None,
+    params: dict[str, Any] | None = None,
**request_kwargs,
-) -> Union[dict[str, Any], list[dict[str, Any]]]:
+) -> dict[str, Any] | list[dict[str, Any]]:
datcore_adapter_settings = app[APP_CONFIG_KEY].DATCORE_ADAPTER
url = datcore_adapter_settings.endpoint + path
session: ClientSession = get_client_session(app)
@@ -64,7 +64,9 @@ async def _request(
params=params,
**request_kwargs,
) as response:
-            return await response.json()
+            response_data = await response.json()
+            assert isinstance(response_data, (dict, list))  # nosec
+            return response_data

except aiohttp.ClientResponseError as exc:
raise _DatcoreAdapterResponseError(status=exc.status, reason=f"{exc}") from exc
@@ -78,17 +80,19 @@ async def _request(
raise DatcoreAdapterClientError(f"unexpected client error: {exc}") from exc


_T = TypeVar("_T")


async def _retrieve_all_pages(
app: web.Application,
api_key: str,
api_secret: str,
method: str,
path: str,
-    return_type: type,
-    return_type_creator: Callable,
-):
+    return_type_creator: Callable[..., _T],
+) -> list[_T]:
page = 1
-    objs: list[return_type] = []
+    objs = []
while (
response := cast(
dict[str, Any],
@@ -210,7 +214,6 @@ async def list_datasets(
api_secret,
"GET",
"/datasets",
-        DatasetMetaData,
lambda d: DatasetMetaData(dataset_id=d["id"], display_name=d["display_name"]),
)

@@ -224,7 +227,7 @@ async def get_file_download_presigned_link(
dict[str, Any],
await _request(app, api_key, api_secret, "GET", f"/files/{file_id}"),
)
return parse_obj_as(AnyUrl, file_download_data["link"])
return cast(AnyUrl, parse_obj_as(AnyUrl, file_download_data["link"])) # mypy


async def delete_file(
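Note on the _retrieve_all_pages change above: the runtime return_type argument is replaced by a TypeVar, so mypy infers the element type of the returned list from the creator callable, and the redundant DatasetMetaData argument at the list_datasets call site disappears. The assert isinstance(...)  # nosec added in _request serves the same goal, narrowing the Any returned by response.json() while the nosec mark silences bandit's assert warning. A minimal sketch of the TypeVar pattern, with a made-up paged-response shape for illustration:

from typing import Any, Callable, TypeVar

_T = TypeVar("_T")

def collect_pages(
    pages: list[dict[str, Any]],
    creator: Callable[[dict[str, Any]], _T],
) -> list[_T]:
    # mypy binds _T to the creator's return type, so callers get e.g.
    # list[str] without passing a separate return_type argument
    objs: list[_T] = []
    for page in pages:
        objs.extend(creator(item) for item in page["items"])
    return objs

names = collect_pages(
    [{"items": [{"id": "a"}, {"id": "b"}]}],
    lambda d: str(d["id"]),
)  # inferred as list[str]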
services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter_settings.py
@@ -1,4 +1,5 @@
from functools import cached_property
+from typing import cast

from models_library.basic_types import PortInt, VersionTag
from pydantic import AnyHttpUrl, Field
@@ -15,9 +16,12 @@ class DatcoreAdapterSettings(BaseCustomSettings):

@cached_property
def endpoint(self) -> str:
-        return AnyHttpUrl.build(
-            scheme="http",
-            host=self.DATCORE_ADAPTER_HOST,
-            port=f"{self.DATCORE_ADAPTER_PORT}",
-            path=f"/{self.DATCORE_ADAPTER_VTAG}",
+        return cast(
+            str,  # mypy
+            AnyHttpUrl.build(
+                scheme="http",
+                host=self.DATCORE_ADAPTER_HOST,
+                port=f"{self.DATCORE_ADAPTER_PORT}",
+                path=f"/{self.DATCORE_ADAPTER_VTAG}",
+            ),
)
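Here cast() reconciles the endpoint property's declared str return with the type mypy assigns to AnyHttpUrl.build(); at runtime cast() is a no-op. A small sketch of the same move, using a hypothetical _build_url helper standing in for AnyHttpUrl.build:

from typing import Any, cast

def _build_url(scheme: str, host: str, port: int) -> Any:
    # stand-in for a third-party builder whose stubs do not promise str
    return f"{scheme}://{host}:{port}"

def endpoint() -> str:
    # cast() does nothing at runtime; it only tells mypy to treat the
    # result as the str this function is annotated to return
    return cast(str, _build_url("http", "datcore-adapter", 8000))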
13 changes: 6 additions & 7 deletions services/storage/src/simcore_service_storage/db.py
@@ -1,11 +1,12 @@
import logging
-from typing import Any, Optional
+from typing import Any, cast

from aiohttp import web
from aiopg.sa import Engine
from servicelib.aiohttp.aiopg_utils import DataSourceName, is_pg_responsive
from servicelib.common_aiopg_utils import create_pg_engine
from servicelib.retry_policies import PostgresRetryPolicyUponInitialization
+from settings_library.postgres import PostgresSettings
from simcore_postgres_database.utils_aiopg import (
close_engine,
get_pg_engine_stateinfo,
@@ -14,14 +15,12 @@
from tenacity import retry

from .constants import APP_CONFIG_KEY, APP_DB_ENGINE_KEY
-from .settings import PostgresSettings

log = logging.getLogger(__name__)


@retry(**PostgresRetryPolicyUponInitialization(log).kwargs)
async def _ensure_pg_ready(dsn: DataSourceName, min_size: int, max_size: int) -> Engine:

log.info("Creating pg engine for %s", dsn)

engine = await create_pg_engine(dsn, minsize=min_size, maxsize=max_size)
@@ -31,7 +30,7 @@ async def _ensure_pg_ready(dsn: DataSourceName, min_size: int, max_size: int) ->
await close_engine(engine)
raise

-    return engine  # type: ignore # tenacity rules guarantee exit with exc
+    return engine  # tenacity rules guarantee exit with exc


async def postgres_cleanup_ctx(app: web.Application):
@@ -45,7 +44,7 @@ async def postgres_cleanup_ctx(app: web.Application):
password=pg_cfg.POSTGRES_PASSWORD.get_secret_value(),
host=pg_cfg.POSTGRES_HOST,
port=pg_cfg.POSTGRES_PORT,
-    )  # type: ignore
+    )

log.info("Creating pg engine for %s", dsn)

@@ -79,9 +78,9 @@ async def is_service_responsive(app: web.Application):


def get_engine_state(app: web.Application) -> dict[str, Any]:
-    engine: Optional[Engine] = app.get(APP_DB_ENGINE_KEY)
+    engine: Engine | None = app.get(APP_DB_ENGINE_KEY)
if engine:
-        return get_pg_engine_stateinfo(engine)
+        return cast(dict[str, Any], get_pg_engine_stateinfo(engine))  # mypy
return {}


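Two patterns recur in db.py: Optional[X] rewritten as the PEP 604 union X | None, and cast() narrowing the result of a helper mypy sees as untyped. A self-contained sketch under those assumptions (Engine and _state_of are stand-ins, not the real aiopg objects):

from typing import Any, cast

class Engine:  # stand-in for aiopg.sa.Engine
    closed = False

_app: dict[str, Engine] = {}

def _state_of(engine):  # deliberately unannotated, like an older helper
    return {"closed": engine.closed}

def get_engine_state(key: str) -> dict[str, Any]:
    engine: Engine | None = _app.get(key)  # PEP 604 union instead of Optional[Engine]
    if engine:
        # _state_of returns Any to mypy; cast() records the actual shape
        return cast(dict[str, Any], _state_of(engine))
    return {}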
19 changes: 9 additions & 10 deletions services/storage/src/simcore_service_storage/db_file_meta_data.py
@@ -1,5 +1,5 @@
import datetime
-from typing import AsyncGenerator, Optional, Union
+from typing import AsyncGenerator

import sqlalchemy as sa
from aiopg.sa.connection import SAConnection
@@ -17,7 +17,7 @@


async def exists(conn: SAConnection, file_id: SimcoreS3FileID) -> bool:
-    return (
+    return bool(
await conn.scalar(
sa.select([sa.func.count()])
.select_from(file_meta_data)
@@ -28,7 +28,7 @@ async def exists(conn: SAConnection, file_id: SimcoreS3FileID) -> bool:


async def upsert(
-    conn: SAConnection, fmd: Union[FileMetaData, FileMetaDataAtDB]
+    conn: SAConnection, fmd: FileMetaData | FileMetaDataAtDB
) -> FileMetaDataAtDB:
# NOTE: upsert file_meta_data, if the file already exists, we update the whole row
# so we get the correct time stamps
@@ -69,8 +69,8 @@ async def list_filter_with_partial_file_id(
*,
user_id: UserID,
project_ids: list[ProjectID],
-    file_id_prefix: Optional[str],
-    partial_file_id: Optional[str],
+    file_id_prefix: str | None,
+    partial_file_id: str | None,
) -> list[FileMetaDataAtDB]:
stmt = sa.select([file_meta_data]).where(
(
@@ -94,12 +94,11 @@ async def list_filter_with_partial_file_id(
async def list_fmds(
conn: SAConnection,
*,
-    user_id: Optional[UserID] = None,
-    project_ids: Optional[list[ProjectID]] = None,
-    file_ids: Optional[list[SimcoreS3FileID]] = None,
-    expired_after: Optional[datetime.datetime] = None,
+    user_id: UserID | None = None,
+    project_ids: list[ProjectID] | None = None,
+    file_ids: list[SimcoreS3FileID] | None = None,
+    expired_after: datetime.datetime | None = None,
) -> list[FileMetaDataAtDB]:

stmt = sa.select([file_meta_data]).where(
and_(
(file_meta_data.c.user_id == f"{user_id}") if user_id else True,
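aiopg's conn.scalar() is typed as returning Any, so exists() above, and project_exists() below, wrap it in bool(): the declared -> bool now holds for mypy, and a count or None is normalized to True/False at runtime. A tiny sketch with a dummy scalar() in place of aiopg's:

import asyncio
from typing import Any

async def scalar(query: str) -> Any:  # SAConnection.scalar is Any-typed
    return 1  # pretend exactly one row matched

async def exists(file_id: str) -> bool:
    # bool() narrows Any to bool for mypy and collapses a count to a flag
    return bool(await scalar(f"SELECT count(*) FROM fmd WHERE id = '{file_id}'"))

print(asyncio.run(exists("some-file")))  # True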
services/storage/src/simcore_service_storage/db_projects.py
@@ -29,7 +29,7 @@ async def project_exists(
conn: SAConnection,
project_uuid: ProjectID,
) -> bool:
-    return (
+    return bool(
await conn.scalar(
sa.select([sa.func.count()])
.select_from(projects)
23 changes: 17 additions & 6 deletions services/storage/src/simcore_service_storage/handlers_datasets.py
@@ -8,6 +8,7 @@
parse_request_path_parameters_as,
parse_request_query_parameters_as,
)
+from servicelib.json_serialization import json_dumps

# Exclusive for simcore-s3 storage -----------------------
from ._meta import api_vtag
@@ -26,8 +27,10 @@
UPLOAD_TASKS_KEY = f"{__name__}.upload_tasks"


-@routes.get(f"/{api_vtag}/locations/{{location_id}}/datasets", name="get_datasets_metadata")  # type: ignore
-async def get_datasets_metadata(request: web.Request):
+@routes.get(
+    f"/{api_vtag}/locations/{{location_id}}/datasets", name="get_datasets_metadata"
+)
+async def get_datasets_metadata(request: web.Request) -> web.Response:
query_params = parse_request_query_parameters_as(StorageQueryParamsBase, request)
path_params = parse_request_path_parameters_as(LocationPathParams, request)
log.debug(
@@ -36,11 +39,16 @@
)

dsm = get_dsm_provider(request.app).get(path_params.location_id)
-    return await dsm.list_datasets(query_params.user_id)
+    return web.json_response(
+        {"data": await dsm.list_datasets(query_params.user_id)}, dumps=json_dumps
+    )


-@routes.get(f"/{api_vtag}/locations/{{location_id}}/datasets/{{dataset_id}}/metadata", name="get_files_metadata_dataset")  # type: ignore
-async def get_files_metadata_dataset(request: web.Request):
+@routes.get(
+    f"/{api_vtag}/locations/{{location_id}}/datasets/{{dataset_id}}/metadata",
+    name="get_files_metadata_dataset",
+)
+async def get_files_metadata_dataset(request: web.Request) -> web.Response:
query_params = parse_request_query_parameters_as(StorageQueryParamsBase, request)
path_params = parse_request_path_parameters_as(
FilesMetadataDatasetPathParams, request
@@ -54,4 +62,7 @@ async def get_files_metadata_dataset(request: web.Request):
user_id=query_params.user_id,
dataset_id=path_params.dataset_id,
)
-    return [jsonable_encoder(FileMetaDataGet.from_orm(d)) for d in data]
+    return web.json_response(
+        {"data": [jsonable_encoder(FileMetaDataGet.from_orm(d)) for d in data]},
+        dumps=json_dumps,
+    )
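With mypy enabled, these handlers now declare -> web.Response and return an explicit, enveloped web.json_response instead of bare lists or models, with servicelib's json_dumps serializing types the stdlib encoder cannot. A runnable sketch of the shape (the route and payload are invented; stdlib json.dumps stands in for the service's serializer):

import json
from aiohttp import web

async def list_items(request: web.Request) -> web.Response:
    data = [{"id": 1}, {"id": 2}]  # placeholder for dsm.list_datasets(...)
    # enveloped payload, explicit Response type, pluggable dumps
    return web.json_response({"data": data}, dumps=json.dumps)

app = web.Application()
app.add_routes([web.get("/v0/items", list_items)])
# web.run_app(app)  # uncomment to serve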
[Diff truncated: 9 of the 17 changed files are not shown.]