♻️Maintenance: mypy simcore sdk (#6118)
sanderegg authored Jul 31, 2024
1 parent 2d36783 commit e97b4e5
Showing 21 changed files with 83 additions and 46 deletions.
1 change: 0 additions & 1 deletion .github/workflows/ci-testing-deploy.yml
@@ -1683,7 +1683,6 @@ jobs:
run: ./ci/github/unit-testing/simcore-sdk.bash install
- name: typecheck
run: ./ci/github/unit-testing/simcore-sdk.bash typecheck
-continue-on-error: true
- name: test
if: always()
run: ./ci/github/unit-testing/simcore-sdk.bash test
@@ -70,7 +70,7 @@ class Config:
}


-FileLocationArray = ListModel[FileLocation]
+FileLocationArray: TypeAlias = ListModel[FileLocation]


# /locations/{location_id}/datasets
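Aside: the change above follows PEP 613, which recommends an explicit TypeAlias annotation for module-level aliases so mypy treats the assignment as a type alias rather than a plain value. A minimal sketch with hypothetical names, not taken from this repo:

from typing import TypeAlias

FileLocationIds = list[int]  # implicit alias: mypy has to guess the intent
FileLocationIdsExplicit: TypeAlias = list[int]  # explicit alias, verified by mypy

def first_location(ids: FileLocationIdsExplicit) -> int:
    # the alias is interchangeable with list[int] in annotations
    return ids[0]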
@@ -3,6 +3,7 @@
This module was necessary because simcore-sdk (an aiohttp-independent package) still needs some
of the helpers here.
"""

import logging
from dataclasses import asdict, dataclass

5 changes: 4 additions & 1 deletion packages/simcore-sdk/requirements/_test.in
@@ -29,4 +29,7 @@ pytest-sugar
pytest-xdist
python-dotenv
requests
-types-aiobotocore[s3] # s3 storage
+sqlalchemy[mypy]
+types-aiobotocore[s3]
+types-aiofiles
+types-tqdm
14 changes: 14 additions & 0 deletions packages/simcore-sdk/requirements/_test.txt
@@ -176,6 +176,10 @@ multidict==6.0.5
# -c requirements/_base.txt
# aiohttp
# yarl
+mypy==1.11.0
+# via sqlalchemy
+mypy-extensions==1.0.0
+# via mypy
networkx==3.3
# via cfn-lint
openapi-schema-validator==0.6.2
@@ -288,28 +292,38 @@ sqlalchemy==1.4.52
# via
# -c requirements/../../../requirements/constraints.txt
# -c requirements/_base.txt
+# -r requirements/_test.in
# alembic
+sqlalchemy2-stubs==0.0.2a38
+# via sqlalchemy
sympy==1.13.0
# via cfn-lint
termcolor==2.4.0
# via pytest-sugar
tomli==2.0.1
# via
# coverage
+# mypy
# pytest
types-aiobotocore==2.13.1
# via -r requirements/_test.in
types-aiobotocore-s3==2.13.1
# via types-aiobotocore
+types-aiofiles==24.1.0.20240626
+# via -r requirements/_test.in
types-awscrt==0.21.2
# via botocore-stubs
+types-tqdm==4.66.0.20240417
+# via -r requirements/_test.in
typing-extensions==4.12.2
# via
# -c requirements/_base.txt
# alembic
# aws-sam-translator
# cfn-lint
+# mypy
# pydantic
+# sqlalchemy2-stubs
# types-aiobotocore
# types-aiobotocore-s3
urllib3==2.2.2
5 changes: 4 additions & 1 deletion packages/simcore-sdk/requirements/_tools.txt
@@ -29,9 +29,12 @@ isort==5.13.2
mccabe==0.7.0
# via pylint
mypy==1.11.0
-# via -r requirements/../../../requirements/devenv.txt
+# via
+# -c requirements/_test.txt
+# -r requirements/../../../requirements/devenv.txt
mypy-extensions==1.0.0
# via
+# -c requirements/_test.txt
# black
# mypy
nodeenv==1.9.1
5 changes: 5 additions & 0 deletions packages/simcore-sdk/setup.cfg
@@ -20,3 +20,8 @@ markers =
acceptance_test: "marks tests as 'acceptance tests' i.e. does the system do what the user expects? Typically those are workflows."
testit: "marks test to run during development"
heavy_load: "mark tests that require large amount of data"

+[mypy]
+plugins =
+pydantic.mypy
+sqlalchemy.ext.mypy.plugin
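These two plugins teach mypy about code that pydantic and SQLAlchemy generate at runtime: synthesized model constructors and declarative mappings, respectively. As a hedged illustration with a hypothetical model (not from this repo), the pydantic plugin lets mypy check constructor calls that would otherwise be opaque:

from pydantic import BaseModel

class FileLocation(BaseModel):
    name: str
    id: int

loc = FileLocation(name="simcore.s3", id=0)  # OK
# With pydantic.mypy enabled, mypy knows the synthesized __init__ and would
# flag FileLocation(name="simcore.s3") with: Missing named argument "id"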
@@ -2,6 +2,7 @@
from pathlib import Path
from tempfile import TemporaryDirectory

+from models_library.basic_types import IDStr
from models_library.projects import ProjectID
from models_library.projects_nodes_io import NodeID, StorageFileID
from models_library.users import UserID
@@ -23,7 +24,7 @@ def __create_s3_object_key(
project_id: ProjectID, node_uuid: NodeID, file_path: Path | str
) -> StorageFileID:
file_name = file_path.name if isinstance(file_path, Path) else file_path
-return parse_obj_as(StorageFileID, f"{project_id}/{node_uuid}/{file_name}")
+return parse_obj_as(StorageFileID, f"{project_id}/{node_uuid}/{file_name}") # type: ignore[arg-type]


def __get_s3_name(path: Path, *, is_archive: bool) -> str:
@@ -97,7 +98,7 @@ async def _pull_legacy_archive(
) -> None:
# NOTE: the legacy way of storing states was as zip archives
async with progress_bar.sub_progress(
-steps=2, description=f"pulling {destination_path.name}"
+steps=2, description=IDStr(f"pulling {destination_path.name}")
) as sub_prog:
with TemporaryDirectory() as tmp_dir_name:
archive_file = Path(tmp_dir_name) / __get_s3_name(
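The description=IDStr(...) wrappers that recur in this commit exist because ProgressBarData declares description as IDStr, a constrained string type, and mypy rejects a plain str where the subtype is expected even though both are ordinary strings at runtime. A hedged sketch of the pattern, with assumed constraints (the real IDStr lives in models_library.basic_types):

from pydantic import ConstrainedStr

class IDStr(ConstrainedStr):
    min_length = 1  # assumed bounds, for illustration only
    max_length = 100

def describe(description: IDStr) -> None:
    print(description)

name = "outputs.zip"
# describe(f"pulling {name}")       # mypy: incompatible type "str"; expected "IDStr"
describe(IDStr(f"pulling {name}"))  # accepted; direct construction is just a str subclass call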
@@ -1,4 +1,5 @@
import logging
+from typing import cast

from aiohttp import ClientError, ClientSession
from models_library.api_schemas_storage import (
@@ -7,11 +8,10 @@
FileUploadCompleteResponse,
FileUploadCompleteState,
FileUploadCompletionBody,
-LocationID,
-LocationName,
UploadedPart,
)
from models_library.generics import Envelope
+from models_library.projects_nodes_io import LocationID, LocationName
from models_library.users import UserID
from models_library.utils.fastapi_encoders import jsonable_encoder
from pydantic import AnyUrl, parse_obj_as
@@ -37,7 +37,7 @@ async def _get_location_id_from_location_name(
resp = await storage_client.get_storage_locations(session=session, user_id=user_id)
for location in resp:
if location.name == store:
-return location.id
+return cast(LocationID, location.id) # mypy wants it
# location id not found
raise exceptions.S3InvalidStore(store)
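Note that cast only informs the type checker; it performs no conversion or validation at runtime. A minimal sketch with hypothetical values:

from typing import cast

LocationID = int  # stand-in; the real alias lives in models_library.projects_nodes_io

raw: object = 0  # a value mypy cannot narrow on its own
location_id = cast(LocationID, raw)  # zero runtime cost, purely a promise to mypy
assert isinstance(location_id, int)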

@@ -1,6 +1,6 @@
from typing import Final

-from models_library.api_schemas_storage import LocationID
+from models_library.projects_nodes_io import LocationID

CHUNK_SIZE: Final[int] = 16 * 1024 * 1024
MINUTE: Final[int] = 60
@@ -54,7 +54,7 @@ async def _get_node_from_db(

@tenacity.retry(**PostgresRetryPolicyUponInitialization().kwargs)
async def _ensure_postgres_ready(dsn: DataSourceName) -> Engine:
-engine = await create_pg_engine(dsn, minsize=1, maxsize=4)
+engine: aiopg.sa.Engine = await create_pg_engine(dsn, minsize=1, maxsize=4)
try:
await raise_if_migration_not_ready(engine)
except Exception:
@@ -80,7 +80,7 @@ async def _create_db_engine() -> aiopg.sa.Engine:
port=settings.POSTGRES_SETTINGS.POSTGRES_PORT,
)

-engine = await _ensure_postgres_ready(dsn)
+engine: aiopg.sa.Engine = await _ensure_postgres_ready(dsn)
return engine

async def __aenter__(self):
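The new engine: aiopg.sa.Engine annotations are the usual remedy when a helper's return type is too loose for mypy (effectively Any): pinning the variable's type restores checking of every later use. A sketch under assumed names, not the real aiopg API:

from typing import Any

class Engine:  # stand-in for aiopg.sa.Engine
    async def close(self) -> None: ...

def create_engine() -> Any:  # an untyped factory: mypy sees Any
    return Engine()

engine: Engine = create_engine()  # the annotation restores type checking
# engine.cloes()  # mypy would now catch this typo; with Any it passes silently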
@@ -19,7 +19,7 @@
)
from aiohttp.typedefs import LooseHeaders
from models_library.api_schemas_storage import ETag, FileUploadSchema, UploadedPart
-from models_library.basic_types import SHA256Str
+from models_library.basic_types import IDStr, SHA256Str
from pydantic import AnyUrl, NonNegativeInt
from servicelib.aiohttp import status
from servicelib.logging_utils import log_catch
@@ -216,7 +216,7 @@ async def download_link_to_file(
sub_progress = await stack.enter_async_context(
progress_bar.sub_progress(
steps=file_size or 1,
description=f"downloading {file_path.name}",
description=IDStr(f"downloading {file_path.name}"),
)
)

@@ -400,7 +400,7 @@ async def upload_file_to_presigned_links(
)
sub_progress = await stack.enter_async_context(
progress_bar.sub_progress(
-steps=file_size, description=f"uploading {file_name}"
+steps=file_size, description=IDStr(f"uploading {file_name}")
)
)

@@ -10,12 +10,10 @@
FileMetaDataGet,
FileUploadSchema,
LinkType,
-LocationID,
-LocationName,
UploadedPart,
)
-from models_library.basic_types import SHA256Str
-from models_library.projects_nodes_io import StorageFileID
+from models_library.basic_types import IDStr, SHA256Str
+from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID
from models_library.users import UserID
from pydantic import AnyUrl, ByteSize, parse_obj_as
from servicelib.file_utils import create_sha256_checksum
@@ -338,7 +336,7 @@ async def _upload_path(
)

if not progress_bar:
-progress_bar = ProgressBarData(num_steps=1, description="uploading")
+progress_bar = ProgressBarData(num_steps=1, description=IDStr("uploading"))

is_directory: bool = isinstance(path_to_upload, Path) and path_to_upload.is_dir()
if is_directory and not await r_clone.is_r_clone_available(r_clone_settings):
@@ -8,8 +8,9 @@
from pathlib import Path
from typing import Final

-from aiocache import cached
+from aiocache import cached # type: ignore[import-untyped]
from aiofiles import tempfile
+from models_library.basic_types import IDStr
from pydantic import AnyUrl, BaseModel, ByteSize
from pydantic.errors import PydanticErrorMixin
from servicelib.progress_bar import ProgressBarData
@@ -224,7 +225,7 @@ async def _sync_sources(
async with progress_bar.sub_progress(
steps=folder_size,
progress_unit="Byte",
description=f"transferring {local_dir.name}",
description=IDStr(f"transferring {local_dir.name}"),
) as sub_progress:
r_clone_log_parsers: list[BaseRCloneLogParser] = (
[DebugLogParser()] if debug_logs else []
@@ -37,7 +37,7 @@ class _RCloneSyncTransferringStats(BaseModel):
bytes: ByteSize
total_bytes: ByteSize

-class Config: # type: ignore[pydantic-alias]
+class Config:
alias_generator = snake_to_camel


@@ -1,10 +1,10 @@
import datetime
import logging
-from collections.abc import AsyncIterator, Awaitable, Callable
+from collections.abc import AsyncIterator, Callable
from contextlib import asynccontextmanager
from functools import wraps
from json import JSONDecodeError
-from typing import Any, TypeAlias
+from typing import Any, Coroutine, ParamSpec, TypeAlias, TypeVar
from urllib.parse import quote

from aiohttp import ClientResponse, ClientSession
@@ -15,12 +15,11 @@
FileMetaDataGet,
FileUploadSchema,
LinkType,
-LocationID,
PresignedLink,
-StorageFileID,
)
from models_library.basic_types import SHA256Str
from models_library.generics import Envelope
+from models_library.projects_nodes_io import LocationID, StorageFileID
from models_library.users import UserID
from pydantic import ByteSize
from pydantic.networks import AnyUrl
@@ -42,10 +41,15 @@
aiohttp_client_module._RequestContextManager # pylint: disable=protected-access # noqa: SLF001
)

+P = ParamSpec("P")
+R = TypeVar("R")

-def handle_client_exception(handler: Callable) -> Callable[..., Awaitable[Any]]:

+def handle_client_exception(
+handler: Callable[P, Coroutine[Any, Any, R]]
+) -> Callable[P, Coroutine[Any, Any, R]]:
@wraps(handler)
-async def wrapped(*args, **kwargs):
+async def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
try:
return await handler(*args, **kwargs)
except ClientResponseError as err:
@@ -69,6 +73,9 @@ async def wrapped(*args, **kwargs):
except JSONDecodeError as err:
msg = f"{err}"
raise exceptions.StorageServerIssue(msg) from err
+# satisfy mypy
+msg = "Unhandled control flow"
+raise RuntimeError(msg)

return wrapped
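The rewrite above uses ParamSpec and TypeVar so the decorator preserves the wrapped coroutine's exact parameters and return type instead of collapsing them to Callable[..., Awaitable[Any]]. A self-contained sketch of the same pattern, with a hypothetical decorator name:

import asyncio
from collections.abc import Callable
from functools import wraps
from typing import Any, Coroutine, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")

def log_errors(
    handler: Callable[P, Coroutine[Any, Any, R]]
) -> Callable[P, Coroutine[Any, Any, R]]:
    @wraps(handler)
    async def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
        try:
            return await handler(*args, **kwargs)
        except Exception:
            print("handler failed")  # placeholder for real error translation
            raise

    return wrapped

@log_errors
async def add(x: int, y: int) -> int:
    return x + y

print(asyncio.run(add(1, 2)))  # mypy sees add as (int, int) -> int through the decorator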

@@ -239,6 +246,7 @@ async def get_file_metadata(
raise exceptions.S3InvalidPathError(file_id)

file_metadata_enveloped = Envelope[FileMetaDataGet].parse_obj(payload)
+assert file_metadata_enveloped.data # nosec
return file_metadata_enveloped.data
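The added assert narrows Envelope's optional payload so the function's declared return type holds, since mypy cannot know that data is populated after a successful call; the # nosec marker tells bandit the assert is a typing aid, not a security check. A hedged sketch with a stand-in Envelope:

from dataclasses import dataclass

@dataclass
class Envelope:  # stand-in for models_library.generics.Envelope
    data: dict | None = None

def unwrap(envelope: Envelope) -> dict:
    assert envelope.data is not None  # nosec - narrows "dict | None" to "dict" for mypy
    return envelope.data

print(unwrap(Envelope(data={"file_id": 1})))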


@@ -8,7 +8,7 @@


class PortLink(BasePortLink):
-node_uuid: str = Field(..., regex=UUID_RE, alias="nodeUuid")
+node_uuid: str = Field(..., regex=UUID_RE, alias="nodeUuid") # type: ignore[assignment] # This overrides the base class it is ugly but needs its own PR to fix it


class FileLink(BaseFileLink):
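The ignored [assignment] error above arises because re-declaring an inherited field with a different type violates Liskov substitution, which mypy enforces on attribute overrides; the inline comment marks it as acknowledged debt for a separate PR. A minimal reproduction with hypothetical classes:

from uuid import UUID

from pydantic import BaseModel

class BasePort(BaseModel):  # illustration only, not the real BasePortLink
    node_uuid: UUID

class Port(BasePort):
    # pydantic accepts the override at runtime, but mypy reports
    # error[assignment] here unless it is explicitly ignored
    node_uuid: str  # type: ignore[assignment]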
@@ -3,6 +3,7 @@
from typing import Any, Callable, Coroutine

from models_library.api_schemas_storage import LinkType
+from models_library.basic_types import IDStr
from models_library.projects import ProjectIDStr
from models_library.projects_nodes_io import NodeIDStr
from models_library.users import UserID
@@ -149,7 +150,7 @@ async def set_multiple(
"""
tasks = []
async with progress_bar.sub_progress(
-steps=len(port_values.items()), description="set multiple"
+steps=len(port_values.items()), description=IDStr("set multiple")
) as sub_progress:
for port_key, (value, set_kwargs) in port_values.items():
# pylint: disable=protected-access
7 changes: 4 additions & 3 deletions packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py
@@ -8,6 +8,7 @@

from models_library.api_schemas_storage import LinkType
from models_library.basic_regex import PROPERTY_KEY_RE
+from models_library.basic_types import IDStr
from models_library.services_io import BaseServiceIOModel
from pydantic import AnyUrl, Field, PrivateAttr, ValidationError, validator
from pydantic.tools import parse_obj_as
@@ -248,7 +249,7 @@ async def _evaluate() -> ItemConcreteValue | None:
key=self.key,
value=self.value,
file_to_key_map=self.file_to_key_map,
-node_port_creator=self._node_ports._node_ports_creator_cb,
+node_port_creator=self._node_ports._node_ports_creator_cb, # noqa: SLF001
progress_bar=progress_bar,
)
value = other_port_concretevalue
@@ -366,7 +367,7 @@ async def set(
new_concrete_value=new_value,
**set_kwargs,
progress_bar=progress_bar
-or ProgressBarData(num_steps=1, description="set"),
+or ProgressBarData(num_steps=1, description=IDStr("set")),
)
await self._node_ports.save_to_db_cb(self._node_ports)

@@ -397,7 +398,7 @@ async def set_value(self, new_item_value: ItemValue | None) -> None:
new_item_value
)
self.value_concrete = None
-self.value = new_concrete_value
+self.value = new_concrete_value # type: ignore[assignment]

self.value_item = None
self.value_concrete = None