diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml
index cedb0ed35f9..25730b9db06 100644
--- a/.github/workflows/ci-testing-deploy.yml
+++ b/.github/workflows/ci-testing-deploy.yml
@@ -1683,7 +1683,6 @@ jobs:
         run: ./ci/github/unit-testing/simcore-sdk.bash install
       - name: typecheck
         run: ./ci/github/unit-testing/simcore-sdk.bash typecheck
-        continue-on-error: true
       - name: test
         if: always()
         run: ./ci/github/unit-testing/simcore-sdk.bash test
diff --git a/packages/models-library/src/models_library/api_schemas_storage.py b/packages/models-library/src/models_library/api_schemas_storage.py
index e68cd99d1ee..29e341456bc 100644
--- a/packages/models-library/src/models_library/api_schemas_storage.py
+++ b/packages/models-library/src/models_library/api_schemas_storage.py
@@ -70,7 +70,7 @@ class Config:
         }


-FileLocationArray = ListModel[FileLocation]
+FileLocationArray: TypeAlias = ListModel[FileLocation]


 # /locations/{location_id}/datasets
diff --git a/packages/service-library/src/servicelib/common_aiopg_utils.py b/packages/service-library/src/servicelib/common_aiopg_utils.py
index fba6a8a4429..cf7c6aba40d 100644
--- a/packages/service-library/src/servicelib/common_aiopg_utils.py
+++ b/packages/service-library/src/servicelib/common_aiopg_utils.py
@@ -3,6 +3,7 @@
 This module was necessary because simcore-sdk (an aiohttp-independent package)
 still needs some of the helpers here.
 """
+
 import logging
 from dataclasses import asdict, dataclass

diff --git a/packages/simcore-sdk/requirements/_test.in b/packages/simcore-sdk/requirements/_test.in
index 7aafb4eb65b..5ee33c8a3cd 100644
--- a/packages/simcore-sdk/requirements/_test.in
+++ b/packages/simcore-sdk/requirements/_test.in
@@ -29,4 +29,7 @@ pytest-sugar
 pytest-xdist
 python-dotenv
 requests
-types-aiobotocore[s3] # s3 storage
+sqlalchemy[mypy]
+types-aiobotocore[s3]
+types-aiofiles
+types-tqdm
diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt
index cb44362d5fe..dd2ec4e8192 100644
--- a/packages/simcore-sdk/requirements/_test.txt
+++ b/packages/simcore-sdk/requirements/_test.txt
@@ -176,6 +176,10 @@ multidict==6.0.5
     #   -c requirements/_base.txt
     #   aiohttp
     #   yarl
+mypy==1.11.0
+    # via sqlalchemy
+mypy-extensions==1.0.0
+    # via mypy
 networkx==3.3
     # via cfn-lint
 openapi-schema-validator==0.6.2
@@ -288,7 +292,10 @@ sqlalchemy==1.4.52
     # via
     #   -c requirements/../../../requirements/constraints.txt
     #   -c requirements/_base.txt
+    #   -r requirements/_test.in
     #   alembic
+sqlalchemy2-stubs==0.0.2a38
+    # via sqlalchemy
 sympy==1.13.0
     # via cfn-lint
 termcolor==2.4.0
@@ -296,20 +303,27 @@ termcolor==2.4.0
 tomli==2.0.1
     # via
     #   coverage
+    #   mypy
     #   pytest
 types-aiobotocore==2.13.1
     # via -r requirements/_test.in
 types-aiobotocore-s3==2.13.1
     # via types-aiobotocore
+types-aiofiles==24.1.0.20240626
+    # via -r requirements/_test.in
 types-awscrt==0.21.2
     # via botocore-stubs
+types-tqdm==4.66.0.20240417
+    # via -r requirements/_test.in
 typing-extensions==4.12.2
     # via
     #   -c requirements/_base.txt
     #   alembic
     #   aws-sam-translator
     #   cfn-lint
+    #   mypy
     #   pydantic
+    #   sqlalchemy2-stubs
     #   types-aiobotocore
     #   types-aiobotocore-s3
 urllib3==2.2.2
diff --git a/packages/simcore-sdk/requirements/_tools.txt b/packages/simcore-sdk/requirements/_tools.txt
index 35f1fd3c0c0..8ba498fbd3e 100644
--- a/packages/simcore-sdk/requirements/_tools.txt
+++ b/packages/simcore-sdk/requirements/_tools.txt
@@ -29,9 +29,12 @@ isort==5.13.2
 mccabe==0.7.0
     # via pylint
 mypy==1.11.0
-    # via -r requirements/../../../requirements/devenv.txt
+    # via
+    #   -c requirements/_test.txt
+    #   -r requirements/../../../requirements/devenv.txt
 mypy-extensions==1.0.0
     # via
+    #   -c requirements/_test.txt
     #   black
     #   mypy
 nodeenv==1.9.1
diff --git a/packages/simcore-sdk/setup.cfg b/packages/simcore-sdk/setup.cfg
index 2167673c8ad..a572e8dbb7b 100644
--- a/packages/simcore-sdk/setup.cfg
+++ b/packages/simcore-sdk/setup.cfg
@@ -20,3 +20,8 @@ markers =
     acceptance_test: "marks tests as 'acceptance tests' i.e. does the system do what the user expects? Typically those are workflows."
     testit: "marks test to run during development"
     heavy_load: "mark tests that require large amount of data"
+
+[mypy]
+plugins =
+    pydantic.mypy
+    sqlalchemy.ext.mypy.plugin
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py
index 48a731d4e4d..398fcdcb6a1 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py
@@ -2,6 +2,7 @@
 from pathlib import Path
 from tempfile import TemporaryDirectory

+from models_library.basic_types import IDStr
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID, StorageFileID
 from models_library.users import UserID
@@ -23,7 +24,7 @@ def __create_s3_object_key(
     project_id: ProjectID, node_uuid: NodeID, file_path: Path | str
 ) -> StorageFileID:
     file_name = file_path.name if isinstance(file_path, Path) else file_path
-    return parse_obj_as(StorageFileID, f"{project_id}/{node_uuid}/{file_name}")
+    return parse_obj_as(StorageFileID, f"{project_id}/{node_uuid}/{file_name}")  # type: ignore[arg-type]


 def __get_s3_name(path: Path, *, is_archive: bool) -> str:
@@ -97,7 +98,7 @@ async def _pull_legacy_archive(
 ) -> None:
     # NOTE: the legacy way of storing states was as zip archives
     async with progress_bar.sub_progress(
-        steps=2, description=f"pulling {destination_path.name}"
+        steps=2, description=IDStr(f"pulling {destination_path.name}")
     ) as sub_prog:
         with TemporaryDirectory() as tmp_dir_name:
             archive_file = Path(tmp_dir_name) / __get_s3_name(
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py
index f6a5351676d..7b5467c2851 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager.py
@@ -1,4 +1,5 @@
 import logging
+from typing import cast

 from aiohttp import ClientError, ClientSession
 from models_library.api_schemas_storage import (
@@ -7,11 +8,10 @@
     FileUploadCompleteResponse,
     FileUploadCompleteState,
     FileUploadCompletionBody,
-    LocationID,
-    LocationName,
     UploadedPart,
 )
 from models_library.generics import Envelope
+from models_library.projects_nodes_io import LocationID, LocationName
 from models_library.users import UserID
 from models_library.utils.fastapi_encoders import jsonable_encoder
 from pydantic import AnyUrl, parse_obj_as
@@ -37,7 +37,7 @@ async def _get_location_id_from_location_name(
     resp = await storage_client.get_storage_locations(session=session, user_id=user_id)
     for location in resp:
         if location.name == store:
-            return location.id
+            return cast(LocationID, location.id)  # mypy wants it
     # location id not found
     raise exceptions.S3InvalidStore(store)

diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/constants.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/constants.py
index 566aa68074f..f6803ffbeb4 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/constants.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/constants.py
@@ -1,6 +1,6 @@
 from typing import Final

-from models_library.api_schemas_storage import LocationID
+from models_library.projects_nodes_io import LocationID

 CHUNK_SIZE: Final[int] = 16 * 1024 * 1024
 MINUTE: Final[int] = 60
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py
index 0cbaf2fe80a..ffcd384a9e4 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py
@@ -54,7 +54,7 @@ async def _get_node_from_db(

 @tenacity.retry(**PostgresRetryPolicyUponInitialization().kwargs)
 async def _ensure_postgres_ready(dsn: DataSourceName) -> Engine:
-    engine = await create_pg_engine(dsn, minsize=1, maxsize=4)
+    engine: aiopg.sa.Engine = await create_pg_engine(dsn, minsize=1, maxsize=4)
     try:
         await raise_if_migration_not_ready(engine)
     except Exception:
@@ -80,7 +80,7 @@ async def _create_db_engine() -> aiopg.sa.Engine:
             port=settings.POSTGRES_SETTINGS.POSTGRES_PORT,
         )

-        engine = await _ensure_postgres_ready(dsn)
+        engine: aiopg.sa.Engine = await _ensure_postgres_ready(dsn)
         return engine

     async def __aenter__(self):
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py
index 0325b052883..695b710c8f8 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py
@@ -19,7 +19,7 @@
 )
 from aiohttp.typedefs import LooseHeaders
 from models_library.api_schemas_storage import ETag, FileUploadSchema, UploadedPart
-from models_library.basic_types import SHA256Str
+from models_library.basic_types import IDStr, SHA256Str
 from pydantic import AnyUrl, NonNegativeInt
 from servicelib.aiohttp import status
 from servicelib.logging_utils import log_catch
@@ -216,7 +216,7 @@ async def download_link_to_file(
         sub_progress = await stack.enter_async_context(
             progress_bar.sub_progress(
                 steps=file_size or 1,
-                description=f"downloading {file_path.name}",
+                description=IDStr(f"downloading {file_path.name}"),
             )
         )

@@ -400,7 +400,7 @@ async def upload_file_to_presigned_links(
         )
         sub_progress = await stack.enter_async_context(
             progress_bar.sub_progress(
-                steps=file_size, description=f"uploading {file_name}"
+                steps=file_size, description=IDStr(f"uploading {file_name}")
             )
         )

diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py
index 32f108a31f1..5581f801c3f 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py
@@ -10,12 +10,10 @@
     FileMetaDataGet,
     FileUploadSchema,
     LinkType,
-    LocationID,
-    LocationName,
     UploadedPart,
 )
-from models_library.basic_types import SHA256Str
-from models_library.projects_nodes_io import StorageFileID
+from models_library.basic_types import IDStr, SHA256Str
+from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID
 from models_library.users import UserID
 from pydantic import AnyUrl, ByteSize, parse_obj_as
 from servicelib.file_utils import create_sha256_checksum
@@ -338,7 +336,7 @@ async def _upload_path(
     )

     if not progress_bar:
-        progress_bar = ProgressBarData(num_steps=1, description="uploading")
+        progress_bar = ProgressBarData(num_steps=1, description=IDStr("uploading"))

     is_directory: bool = isinstance(path_to_upload, Path) and path_to_upload.is_dir()
     if is_directory and not await r_clone.is_r_clone_available(r_clone_settings):
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py
index 6fee4bda267..5fe5df2c4b6 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py
@@ -8,8 +8,9 @@
 from pathlib import Path
 from typing import Final

-from aiocache import cached
+from aiocache import cached  # type: ignore[import-untyped]
 from aiofiles import tempfile
+from models_library.basic_types import IDStr
 from pydantic import AnyUrl, BaseModel, ByteSize
 from pydantic.errors import PydanticErrorMixin
 from servicelib.progress_bar import ProgressBarData
@@ -224,7 +225,7 @@ async def _sync_sources(
     async with progress_bar.sub_progress(
         steps=folder_size,
         progress_unit="Byte",
-        description=f"transferring {local_dir.name}",
+        description=IDStr(f"transferring {local_dir.name}"),
     ) as sub_progress:
         r_clone_log_parsers: list[BaseRCloneLogParser] = (
             [DebugLogParser()] if debug_logs else []
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py
index 3727c18d08e..ed64f74137f 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone_utils.py
@@ -37,7 +37,7 @@ class _RCloneSyncTransferringStats(BaseModel):
     bytes: ByteSize
     total_bytes: ByteSize

-    class Config:  # type: ignore[pydantic-alias]
+    class Config:
         alias_generator = snake_to_camel


diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py
index 5f3a68bc8a3..c249cbcf830 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/storage_client.py
@@ -1,10 +1,10 @@
 import datetime
 import logging
-from collections.abc import AsyncIterator, Awaitable, Callable
+from collections.abc import AsyncIterator, Callable
 from contextlib import asynccontextmanager
 from functools import wraps
 from json import JSONDecodeError
-from typing import Any, TypeAlias
+from typing import Any, Coroutine, ParamSpec, TypeAlias, TypeVar
 from urllib.parse import quote

 from aiohttp import ClientResponse, ClientSession
@@ -15,12 +15,11 @@
     FileMetaDataGet,
     FileUploadSchema,
     LinkType,
-    LocationID,
     PresignedLink,
-    StorageFileID,
 )
 from models_library.basic_types import SHA256Str
 from models_library.generics import Envelope
+from models_library.projects_nodes_io import LocationID, StorageFileID
 from models_library.users import UserID
 from pydantic import ByteSize
 from pydantic.networks import AnyUrl
@@ -42,10 +41,15 @@
     aiohttp_client_module._RequestContextManager  # pylint: disable=protected-access # noqa: SLF001
 )

+P = ParamSpec("P")
+R = TypeVar("R")

-def handle_client_exception(handler: Callable) -> Callable[..., Awaitable[Any]]:
+
+def handle_client_exception(
+    handler: Callable[P, Coroutine[Any, Any, R]]
+) -> Callable[P, Coroutine[Any, Any, R]]:
     @wraps(handler)
-    async def wrapped(*args, **kwargs):
+    async def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
         try:
             return await handler(*args, **kwargs)
         except ClientResponseError as err:
@@ -69,6 +73,9 @@ async def wrapped(*args, **kwargs):
         except JSONDecodeError as err:
             msg = f"{err}"
             raise exceptions.StorageServerIssue(msg) from err
+        # satisfy mypy
+        msg = "Unhandled control flow"
+        raise RuntimeError(msg)

     return wrapped

@@ -239,6 +246,7 @@ async def get_file_metadata(
         raise exceptions.S3InvalidPathError(file_id)

     file_metadata_enveloped = Envelope[FileMetaDataGet].parse_obj(payload)
+    assert file_metadata_enveloped.data  # nosec
     return file_metadata_enveloped.data


diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py
index 4613ea6931b..d8eb1d99349 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/links.py
@@ -8,7 +8,7 @@


 class PortLink(BasePortLink):
-    node_uuid: str = Field(..., regex=UUID_RE, alias="nodeUuid")
+    node_uuid: str = Field(..., regex=UUID_RE, alias="nodeUuid")  # type: ignore[assignment] # This overrides the base class it is ugly but needs its own PR to fix it


 class FileLink(BaseFileLink):
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
index 3bb4cfc9d2d..8418a006b42 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
@@ -3,6 +3,7 @@
 from typing import Any, Callable, Coroutine

 from models_library.api_schemas_storage import LinkType
+from models_library.basic_types import IDStr
 from models_library.projects import ProjectIDStr
 from models_library.projects_nodes_io import NodeIDStr
 from models_library.users import UserID
@@ -149,7 +150,7 @@ async def set_multiple(
         """
         tasks = []
         async with progress_bar.sub_progress(
-            steps=len(port_values.items()), description="set multiple"
+            steps=len(port_values.items()), description=IDStr("set multiple")
         ) as sub_progress:
             for port_key, (value, set_kwargs) in port_values.items():
                 # pylint: disable=protected-access
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py
index 5fe16c48ff2..e78b5a6581f 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py
@@ -8,6 +8,7 @@

 from models_library.api_schemas_storage import LinkType
 from models_library.basic_regex import PROPERTY_KEY_RE
+from models_library.basic_types import IDStr
 from models_library.services_io import BaseServiceIOModel
 from pydantic import AnyUrl, Field, PrivateAttr, ValidationError, validator
 from pydantic.tools import parse_obj_as
@@ -248,7 +249,7 @@ async def _evaluate() -> ItemConcreteValue | None:
                 key=self.key,
                 value=self.value,
                 file_to_key_map=self.file_to_key_map,
-                node_port_creator=self._node_ports._node_ports_creator_cb,
+                node_port_creator=self._node_ports._node_ports_creator_cb,  # noqa: SLF001
                 progress_bar=progress_bar,
             )
             value = other_port_concretevalue
@@ -366,7 +367,7 @@ async def set(
             new_concrete_value=new_value,
             **set_kwargs,
             progress_bar=progress_bar
-            or ProgressBarData(num_steps=1, description="set"),
+            or ProgressBarData(num_steps=1, description=IDStr("set")),
         )

         await self._node_ports.save_to_db_cb(self._node_ports)
@@ -397,7 +398,7 @@ async def set_value(self, new_item_value: ItemValue | None) -> None:
                 new_item_value
             )
         self.value_concrete = None
-        self.value = new_concrete_value
+        self.value = new_concrete_value  # type: ignore[assignment]
         self.value_item = None
         self.value_concrete = None

diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py
index 0c441efeda7..4bc977b46f6 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py
@@ -5,7 +5,8 @@
 from typing import Any

 from models_library.api_schemas_storage import FileUploadSchema, LinkType
-from models_library.basic_types import SHA256Str
+from models_library.basic_types import IDStr, SHA256Str
+from models_library.services_types import FileName, ServicePortKey
 from models_library.users import UserID
 from pydantic import AnyUrl, ByteSize
 from pydantic.tools import parse_obj_as
@@ -31,7 +32,7 @@ async def get_value_link_from_port_link(
 ) -> ItemValue | None:
     log.debug("Getting value link %s", value)
     # create a node ports for the other node
-    other_nodeports = await node_port_creator(value.node_uuid)
+    other_nodeports = await node_port_creator(f"{value.node_uuid}")

     # get the port value through that guy
     log.debug("Received node from DB %s, now returning value link", other_nodeports)
@@ -44,14 +45,14 @@ async def get_value_from_link(
     key: str,
     value: PortLink,
-    file_to_key_map: dict[str, str] | None,
+    file_to_key_map: dict[FileName, ServicePortKey] | None,
     node_port_creator: Callable[[str], Coroutine[Any, Any, Any]],
     *,
     progress_bar: ProgressBarData | None,
 ) -> ItemConcreteValue | None:
     log.debug("Getting value %s", value)
     # create a node ports for the other node
-    other_nodeports = await node_port_creator(value.node_uuid)
+    other_nodeports = await node_port_creator(f"{value.node_uuid}")

     # get the port value through that guy
     log.debug("Received node from DB %s, now returning value", other_nodeports)
@@ -183,7 +184,7 @@ async def delete_target_link(
 async def pull_file_from_store(
     user_id: UserID,
     key: str,
-    file_to_key_map: dict[str, str] | None,
+    file_to_key_map: dict[FileName, ServicePortKey] | None,
     value: FileLink,
     io_log_redirect_cb: LogRedirectCB | None,
     r_clone_settings: RCloneSettings | None,
@@ -201,7 +202,7 @@ async def pull_file_from_store(
         io_log_redirect_cb=io_log_redirect_cb,
         r_clone_settings=r_clone_settings,
         progress_bar=progress_bar
-        or ProgressBarData(num_steps=1, description="pulling file"),
+        or ProgressBarData(num_steps=1, description=IDStr("pulling file")),
     )
     # if a file alias is present use it to rename the file accordingly
     if file_to_key_map:
@@ -257,7 +258,7 @@ async def push_file_to_store(

 async def pull_file_from_download_link(
     key: str,
-    file_to_key_map: dict[str, str] | None,
+    file_to_key_map: dict[FileName, ServicePortKey] | None,
     value: DownloadLink,
     io_log_redirect_cb: LogRedirectCB | None,
     progress_bar: ProgressBarData | None,
@@ -275,7 +276,7 @@ async def pull_file_from_download_link(
         local_path,
         io_log_redirect_cb=io_log_redirect_cb,
         progress_bar=progress_bar
-        or ProgressBarData(num_steps=1, description="pulling file"),
+        or ProgressBarData(num_steps=1, description=IDStr("pulling file")),
     )

     # if a file alias is present use it to rename the file accordingly
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py
index 0dc7552115b..3016f17ae2e 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/serialization_v2.py
@@ -5,7 +5,8 @@
 from typing import Any

 import pydantic
-from models_library.projects_nodes import NodeID
+from models_library.projects_nodes_io import NodeID
+from models_library.utils.json_serialization import json_dumps
 from models_library.utils.nodes import compute_node_hash
 from packaging import version
 from settings_library.r_clone import RCloneSettings
@@ -145,7 +146,7 @@ async def get_node_io_payload_cb(node_id: NodeID) -> dict[str, Any]:
     )

     # convert to DB
-    port_cfg = {
+    port_cfg: dict[str, Any] = {
         "schema": {"inputs": {}, "outputs": {}},
         "inputs": {},
         "outputs": {},
@@ -164,14 +165,14 @@ async def get_node_io_payload_cb(node_id: NodeID) -> dict[str, Any]:
             # pylint: disable=protected-access
             if (
                 port_values["value"] is not None
-                and not getattr(nodeports, f"internal_{port_type}")[
+                and not getattr(nodeports, f"internal_{port_type}")[  # noqa: SLF001
                     port_key
                 ]._used_default_value
             ):
                 port_cfg[port_type][port_key] = port_values["value"]

     await nodeports.db_manager.write_ports_configuration(
-        json.dumps(port_cfg),
+        json_dumps(port_cfg),
         nodeports.project_id,
         nodeports.node_uuid,
     )