From 893acadd1530b82b2964c19f7c0315e25ab4fa42 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 09:34:37 +0100 Subject: [PATCH 01/13] fix type --- packages/aws-library/tests/test_s3_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py index 5e5efc962a5..bd853f51860 100644 --- a/packages/aws-library/tests/test_s3_client.py +++ b/packages/aws-library/tests/test_s3_client.py @@ -696,7 +696,7 @@ async def test_create_single_presigned_download_link( dest_file = tmp_path / faker.file_name() async with ClientSession() as session: - response = await session.get(download_url) + response = await session.get(f"{download_url}") response.raise_for_status() with dest_file.open("wb") as fp: fp.write(await response.read()) From f534e35c7c8b6ab9c7f477c8c87ae2de53835aae Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 09:47:07 +0100 Subject: [PATCH 02/13] fixed weird assert --- .../rpc_interfaces/resource_usage_tracker/service_runs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py index ad7b2fd908b..9d4bd57204c 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py @@ -1,5 +1,5 @@ import logging -from typing import Final, cast +from typing import Final from models_library.api_schemas_resource_usage_tracker import ( RESOURCE_USAGE_TRACKER_RPC_NAMESPACE, @@ -115,5 +115,5 @@ async def export_service_runs( filters=filters, timeout_s=_DEFAULT_TIMEOUT_S, ) - assert cast(AnyUrl, isinstance(result, AnyUrl)) # nosec + assert isinstance(result, AnyUrl) # nosec return result From 4686d42a1b90520c1c51ea7e6436227a81c0c814 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 09:47:25 +0100 Subject: [PATCH 03/13] now returns a proper AnyUrl --- .../api/rpc/_resource_tracker.py | 3 ++- .../services/resource_tracker_service_runs.py | 14 ++++++++------ ...t_api_resource_tracker_service_runs__export.py | 15 ++++++++------- 3 files changed, 18 insertions(+), 14 deletions(-) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_resource_tracker.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_resource_tracker.py index 9da69bad6f3..cae70b1152c 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_resource_tracker.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/api/rpc/_resource_tracker.py @@ -24,6 +24,7 @@ from models_library.services import ServiceKey, ServiceVersion from models_library.users import UserID from models_library.wallets import WalletID +from pydantic import AnyUrl from servicelib.rabbitmq import RPCRouter from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( CustomResourceUsageTrackerError, @@ -78,7 +79,7 @@ async def export_service_runs( access_all_wallet_usage: bool = False, order_by: OrderBy | None = None, filters: ServiceResourceUsagesFilters | None 
= None, -) -> str: +) -> AnyUrl: app_settings: ApplicationSettings = app.state.settings s3_settings = app_settings.RESOURCE_USAGE_TRACKER_S3 assert s3_settings # nosec diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py index e5145f8f6bd..8e7719927bc 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta, timezone import shortuuid from aws_library.s3 import SimcoreS3API @@ -18,7 +18,7 @@ from models_library.rest_ordering import OrderBy from models_library.users import UserID from models_library.wallets import WalletID -from pydantic import PositiveInt, TypeAdapter +from pydantic import AnyUrl, PositiveInt, TypeAdapter from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import ( CustomResourceUsageTrackerError, ) @@ -144,6 +144,7 @@ async def list_service_runs( async def export_service_runs( s3_client: SimcoreS3API, + *, bucket_name: str, s3_region: str, user_id: UserID, @@ -153,7 +154,7 @@ async def export_service_runs( access_all_wallet_usage: bool = False, order_by: OrderBy | None = None, filters: ServiceResourceUsagesFilters | None = None, -) -> str: +) -> AnyUrl: started_from = filters.started_at.from_ if filters else None started_until = filters.started_at.until if filters else None @@ -161,7 +162,9 @@ async def export_service_runs( s3_bucket_name = TypeAdapter(S3BucketName).validate_python(bucket_name) # NOTE: su stands for "service usage" file_name = f"su_{shortuuid.uuid()}.csv" - s3_object_key = f"resource-usage-tracker-service-runs/{datetime.now(tz=timezone.utc).date()}/{file_name}" + s3_object_key = ( + f"resource-usage-tracker-service-runs/{datetime.now(tz=UTC).date()}/{file_name}" + ) # Export CSV to S3 await resource_tracker_repo.export_service_runs_table_to_s3( @@ -177,12 +180,11 @@ async def export_service_runs( ) # Create presigned S3 link - generated_url = await s3_client.create_single_presigned_download_link( + return await s3_client.create_single_presigned_download_link( bucket=s3_bucket_name, object_key=s3_object_key, expiration_secs=_PRESIGNED_LINK_EXPIRATION_SEC, ) - return f"{generated_url}" async def get_osparc_credits_aggregated_usages_page( diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py index e433d8c13e6..0cb3db3733e 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py @@ -1,3 +1,8 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments + import os from unittest.mock import Mock @@ -25,25 +30,21 @@ @pytest.fixture async def mocked_export(mocker: MockerFixture): - mock_export = mocker.patch( + return mocker.patch( 
"simcore_service_resource_usage_tracker.services.resource_tracker_service_runs.ResourceTrackerRepository.export_service_runs_table_to_s3", autospec=True, ) - return mock_export - @pytest.fixture async def mocked_presigned_link(mocker: MockerFixture): - mock_presigned_link = mocker.patch( + return mocker.patch( "simcore_service_resource_usage_tracker.services.resource_tracker_service_runs.SimcoreS3API.create_single_presigned_download_link", return_value=TypeAdapter(AnyUrl).validate_python( "https://www.testing.com/", ), ) - return mock_presigned_link - @pytest.fixture async def enable_resource_usage_tracker_s3( @@ -76,6 +77,6 @@ async def test_rpc_list_service_runs_which_was_billed( user_id=_USER_ID, product_name="osparc", ) - assert isinstance(download_url, AnyUrl) + assert isinstance(download_url, AnyUrl) # nosec assert mocked_export.called assert mocked_presigned_link.called From b9be3630d4ec57e70f608ae856ae78ac40696017 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 10:23:33 +0100 Subject: [PATCH 04/13] fix merge from pydantic v2 --- .../simcore_sdk/node_ports_v2/nodeports_v2.py | 43 +++++++++++++++---- 1 file changed, 34 insertions(+), 9 deletions(-) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index 2ecf1422866..bc44698a593 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -1,6 +1,7 @@ import logging +import traceback from abc import ABC, abstractmethod -from asyncio import CancelledError +from asyncio import CancelledError, Task from collections.abc import Callable, Coroutine from pathlib import Path from typing import Any @@ -12,6 +13,7 @@ from models_library.services_types import ServicePortKey from models_library.users import UserID from pydantic import BaseModel, ConfigDict, Field, ValidationError +from pydantic_core import InitErrorDetails from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from settings_library.aws_s3_cli import AwsS3CliSettings @@ -28,6 +30,28 @@ log = logging.getLogger(__name__) +# -> @GitHK this looks very dangerous, using a lot of protected stuff, just checking the number of ignores shows it's a bad idea... 
+def _format_error(task: Task) -> str: + # pylint:disable=protected-access + assert task._exception # nosec # noqa: SLF001 + error_list = traceback.format_exception( + type(task._exception), # noqa: SLF001 + task._exception, # noqa: SLF001 + task._exception.__traceback__, # noqa: SLF001 + ) + return "\n".join(error_list) + + +def _get_error_details(task: Task, port_key: str) -> InitErrorDetails: + # pylint:disable=protected-access + return InitErrorDetails( + type="value_error", + loc=(f"{port_key}",), + input=_format_error(task), + ctx={"error": task._exception}, # noqa: SLF001 + ) + + class OutputsCallbacks(ABC): @abstractmethod async def aborted(self, key: ServicePortKey) -> None: @@ -72,9 +96,9 @@ def __init__(self, **data: Any): # let's pass ourselves down for input_key in self.internal_inputs: - self.internal_inputs[input_key]._node_ports = self + self.internal_inputs[input_key]._node_ports = self # noqa: SLF001 for output_key in self.internal_outputs: - self.internal_outputs[output_key]._node_ports = self + self.internal_outputs[output_key]._node_ports = self # noqa: SLF001 @property async def inputs(self) -> InputsList: @@ -132,10 +156,11 @@ async def set( async def set_file_by_keymap(self, item_value: Path) -> None: for output in (await self.outputs).values(): - if is_file_type(output.property_type) and output.file_to_key_map: - if item_value.name in output.file_to_key_map: - await output.set(item_value) - return + if (is_file_type(output.property_type) and output.file_to_key_map) and ( + item_value.name in output.file_to_key_map + ): + await output.set(item_value) + return raise PortNotFound(msg=f"output port for item {item_value} not found") async def _node_ports_creator_cb(self, node_uuid: NodeIDStr) -> type["Nodeports"]: @@ -152,9 +177,9 @@ async def _auto_update_from_db(self) -> None: # let's pass ourselves down # pylint: disable=protected-access for input_key in self.internal_inputs: - self.internal_inputs[input_key]._node_ports = self + self.internal_inputs[input_key]._node_ports = self # noqa: SLF001 for output_key in self.internal_outputs: - self.internal_outputs[output_key]._node_ports = self + self.internal_outputs[output_key]._node_ports = self # noqa: SLF001 async def set_multiple( self, From f4945c1ef50c51206cc067d9c7964694e0615b20 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 10:34:58 +0100 Subject: [PATCH 05/13] wrong imports --- .../src/simcore_service_dynamic_sidecar/core/utils.py | 4 ++-- .../modules/outputs/_manager.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py index 697dc673b8e..4e6f9ee0df5 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py @@ -7,7 +7,7 @@ from typing import NamedTuple import psutil -from models_library.error_codes import create_error_code +from common_library.error_codes import create_error_code from servicelib.logging_errors import create_troubleshotting_log_kwargs from ..modules.mounted_fs import MountedVolumes @@ -74,7 +74,7 @@ async def async_command( try: stdout, _ = await asyncio.wait_for(proc.communicate(), timeout=timeout) - except asyncio.TimeoutError: + except TimeoutError: proc.terminate() _close_transport(proc) diff --git 
a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py index fc33fe660ac..adf046dd468 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py @@ -6,6 +6,7 @@ from datetime import timedelta from functools import partial +from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI from models_library.basic_types import IDStr from models_library.rabbitmq_messages import ProgressType From ea2df3e2e18bd2c4ac4e73ce120e39d022f7334a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 10:35:57 +0100 Subject: [PATCH 06/13] fixed reqs --- .../dynamic-scheduler/requirements/_base.txt | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt index 7493081203d..4487b628b11 100644 --- a/services/dynamic-scheduler/requirements/_base.txt +++ b/services/dynamic-scheduler/requirements/_base.txt @@ -102,7 +102,6 @@ fastapi==0.115.2 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in - # prometheus-fastapi-instrumentator faststream==0.5.28 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.4.1 @@ -255,8 +254,15 @@ orjson==3.10.7 # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in packaging==24.1 # via -r requirements/_base.in pamqp==3.3.0 @@ -314,8 +320,15 @@ pydantic-core==2.23.4 # via pydantic pydantic-extra-types==2.9.0 # via + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in 
+ # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.5.2 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -462,7 +475,7 @@ typing-extensions==4.12.2 # typer u-msgpack-python==2.8.0 # via -r requirements/_base.in -urllib3==2.2.2 +urllib3==2.2.3 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt From 41210967798c2e3354a4a0d739534d52a088c660 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 10:36:54 +0100 Subject: [PATCH 07/13] fixed wrong field name --- .../services/service_tracker/_models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py index 6b1b5b1a75d..70e063462bc 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_models.py @@ -89,7 +89,7 @@ def __setattr__(self, name, value): last_state_change: float = Field( default_factory=lambda: arrow.utcnow().timestamp(), - metadata={"description": "keeps track when the current_state was last updated"}, + description="keeps track when the current_state was last updated", ) ############################# From 56e42b1fdff5f565016158b30efd55b95a56e03b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 10:42:08 +0100 Subject: [PATCH 08/13] some forgotten fixes --- services/dynamic-sidecar/tests/unit/test_modules_notifier.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py index 51855ffd20f..380e6fa639c 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_notifier.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_notifier.py @@ -131,7 +131,7 @@ async def _assert_call_count(mock: AsyncMock, *, call_count: int) -> None: def _get_mocked_disk_usage(byte_size_str: str) -> DiskUsage: return DiskUsage( - total=ByteSize.validate(byte_size_str), + total=TypeAdapter(ByteSize).validate_python(byte_size_str), used=ByteSize(0), free=TypeAdapter(ByteSize).validate_python(byte_size_str), used_percent=0, @@ -222,7 +222,7 @@ async def test_notifier_publish_disk_usage( @pytest.fixture def port_key() -> ServicePortKey: - return ServicePortKey("test_port") + return TypeAdapter(ServicePortKey).validate_python("test_port") def 
_get_on_input_port_spy(

From e5b147a442185c733005f3ed39dc287bc965828a Mon Sep 17 00:00:00 2001
From: sanderegg <35365065+sanderegg@users.noreply.github.com>
Date: Fri, 1 Nov 2024 10:43:34 +0100
Subject: [PATCH 09/13] missing fix

---
 .../tests/unit/test_node_ports_common_r_clone_utils.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_r_clone_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_r_clone_utils.py
index 13d2bed6042..e2d9b890ba5 100644
--- a/packages/simcore-sdk/tests/unit/test_node_ports_common_r_clone_utils.py
+++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_r_clone_utils.py
@@ -2,7 +2,7 @@
 from unittest.mock import AsyncMock
 
 import pytest
-from pydantic import parse_raw_as
+from pydantic import TypeAdapter
 from simcore_sdk.node_ports_common.r_clone_utils import (
     SyncProgressLogParser,
     _RCloneSyncMessageBase,
@@ -68,7 +68,7 @@
     ],
 )
 async def test_rclone_stbc_message_parsing_regression(log_message: str, expected: type):
-    parsed_log = parse_raw_as(_RCloneSyncMessages, log_message)  # type: ignore[arg-type]
+    parsed_log = TypeAdapter(_RCloneSyncMessages).validate_json(log_message)
     assert isinstance(parsed_log, expected)
 
     progress_log_parser = SyncProgressLogParser(AsyncMock())

From 697970d729305b03a96446c3768ed7b1eb039be0 Mon Sep 17 00:00:00 2001
From: sanderegg <35365065+sanderegg@users.noreply.github.com>
Date: Fri, 1 Nov 2024 10:51:16 +0100
Subject: [PATCH 10/13] removed occurrences of parse_obj

---
 .../node_ports_common/aws_s3_cli_utils.py     |  4 +-
 .../simcore_sdk/node_ports_v2/port_utils.py   |  8 ++--
 .../simcore-sdk/tests/integration/conftest.py |  4 +-
 .../test_node_data_data_manager.py            | 19 ++++++---
 .../test_node_ports_common_aws_s3_cli.py      | 32 ++++++++-------
 .../test_node_ports_common_filemanager.py     | 39 ++++++++++++-------
 .../test_node_ports_common_r_clone.py         | 36 ++++++++++-------
 7 files changed, 86 insertions(+), 56 deletions(-)

diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli_utils.py
index 5cfbb536583..3c7a8bd9d60 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli_utils.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli_utils.py
@@ -1,7 +1,7 @@
 import logging
 import re
 
-from pydantic import ByteSize, parse_obj_as
+from pydantic import ByteSize, TypeAdapter
 from servicelib.logging_utils import log_catch
 from servicelib.progress_bar import ProgressBarData
 
@@ -35,5 +35,5 @@ async def __call__(self, logs: str) -> None:
         _logger.debug("received logs: %s", logs)
         with log_catch(_logger, reraise=False):
             if _size := _parse_size(logs):
-                _bytes = parse_obj_as(ByteSize, _size)
+                _bytes = TypeAdapter(ByteSize).validate_python(_size)
                 await self.progress_bar.set_(_bytes)

diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py
index 655c9576408..3c1462d6fab 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py
@@ -8,8 +8,7 @@
 from models_library.basic_types import IDStr, SHA256Str
 from models_library.services_types import FileName, ServicePortKey
 from models_library.users import UserID
-from pydantic import AnyUrl, ByteSize
-from pydantic.tools import parse_obj_as
+from pydantic import AnyUrl, ByteSize, TypeAdapter
 from servicelib.progress_bar
import ProgressBarData from settings_library.aws_s3_cli import AwsS3CliSettings from settings_library.r_clone import RCloneSettings @@ -64,6 +63,7 @@ async def get_value_from_link( file_name = other_value.name # move the file to the right final location # if a file alias is present use it + if file_to_key_map: file_name = next(iter(file_to_key_map)) @@ -101,7 +101,7 @@ async def get_download_link_from_storage( # could raise ValidationError but will never do it since assert isinstance(link, URL) # nosec - url: AnyUrl = parse_obj_as(AnyUrl, f"{link}") + url: AnyUrl = TypeAdapter(AnyUrl).validate_python(f"{link}") return url @@ -123,7 +123,7 @@ async def get_download_link_from_storage_overload( s3_object=s3_object, link_type=link_type, ) - url: AnyUrl = parse_obj_as(AnyUrl, f"{link}") + url: AnyUrl = TypeAdapter(AnyUrl).validate_python(f"{link}") return url diff --git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index 92b6afaa81b..552a629565a 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -17,7 +17,7 @@ from models_library.generics import Envelope from models_library.projects_nodes_io import LocationID, NodeIDStr, SimcoreS3FileID from models_library.users import UserID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from pytest_simcore.helpers.faker_factories import random_project, random_user from settings_library.aws_s3_cli import AwsS3CliSettings from settings_library.r_clone import RCloneSettings, S3Provider @@ -94,7 +94,7 @@ def create_valid_file_uuid( ) -> Callable[[str, Path], SimcoreS3FileID]: def _create(key: str, file_path: Path) -> SimcoreS3FileID: clean_path = Path(f"{project_id}/{node_uuid}/{key}/{file_path.name}") - return parse_obj_as(SimcoreS3FileID, f"{clean_path}") + return TypeAdapter(SimcoreS3FileID).validate_python(f"{clean_path}") return _create diff --git a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py index ca7a81e6c17..ed2033813e1 100644 --- a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py +++ b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py @@ -14,10 +14,11 @@ import pytest from faker import Faker +from models_library.basic_types import IDStr from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID from models_library.users import UserID -from pydantic import parse_obj_as +from pydantic import TypeAdapter from servicelib.progress_bar import ProgressBarData from settings_library.aws_s3_cli import AwsS3CliSettings from settings_library.r_clone import RCloneSettings @@ -157,7 +158,9 @@ async def test_valid_upload_download( mock_io_log_redirect_cb: LogRedirectCB, faker: Faker, ): - async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: + async with ProgressBarData( + num_steps=2, description=IDStr(faker.pystr()) + ) as progress_bar: await data_manager._push_directory( # noqa: SLF001 user_id=user_id, project_id=project_id, @@ -203,7 +206,9 @@ async def test_valid_upload_download_saved_to( mock_io_log_redirect_cb: LogRedirectCB, faker: Faker, ): - async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: + async with ProgressBarData( + num_steps=2, description=IDStr(faker.pystr()) + ) as progress_bar: await data_manager._push_directory( # noqa: SLF001 
user_id=user_id, project_id=project_id, @@ -251,7 +256,9 @@ async def test_delete_legacy_archive( temp_dir: Path, faker: Faker, ): - async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: + async with ProgressBarData( + num_steps=2, description=IDStr(faker.pystr()) + ) as progress_bar: # NOTE: legacy archives can no longer be crated # generating a "legacy style archive" archive_into_dir = temp_dir / f"legacy-archive-dir-{uuid4()}" @@ -263,8 +270,8 @@ async def test_delete_legacy_archive( user_id=user_id, store_id=SIMCORE_LOCATION, store_name=None, - s3_object=parse_obj_as( - SimcoreS3FileID, f"{project_id}/{node_uuid}/{legacy_archive_name.name}" + s3_object=TypeAdapter(SimcoreS3FileID).validate_python( + f"{project_id}/{node_uuid}/{legacy_archive_name.name}" ), path_to_upload=legacy_archive_name, io_log_redirect_cb=None, diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py index b20f280e291..0c0c03b0363 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py @@ -14,8 +14,9 @@ import aiofiles import pytest from faker import Faker +from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.file_utils import remove_directory from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather @@ -74,8 +75,7 @@ async def cleanup_bucket_after_test( # put to shared config def _fake_s3_link(aws_s3_cli_settings: AwsS3CliSettings, s3_object: str) -> AnyUrl: - return parse_obj_as( - AnyUrl, + return TypeAdapter(AnyUrl).validate_python( f"s3://{aws_s3_cli_settings.AWS_S3_CLI_S3.S3_BUCKET_NAME}/{urllib.parse.quote(s3_object)}", ) @@ -85,7 +85,7 @@ async def _create_random_binary_file( file_path: Path, file_size: ByteSize, # NOTE: bigger files get created faster with bigger chunk_size - chunk_size: int = parse_obj_as(ByteSize, "1mib"), + chunk_size: int = TypeAdapter(ByteSize).validate_python("1mib"), ): async with aiofiles.open(file_path, mode="wb") as file: bytes_written = 0 @@ -148,7 +148,7 @@ async def _report_progress_upload(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_upload, - description=faker.pystr(), + description=IDStr(faker.pystr()), ) as progress_bar: await aws_s3_cli.sync_local_to_s3( aws_s3_cli_settings, @@ -175,7 +175,7 @@ async def _report_progress_download(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_download, - description=faker.pystr(), + description=IDStr(faker.pystr()), ) as progress_bar: await aws_s3_cli.sync_s3_to_local( aws_s3_cli_settings, @@ -246,15 +246,21 @@ async def dir_downloaded_files_2(tmp_path: Path, faker: Faker) -> AsyncIterator[ @pytest.mark.parametrize( "file_count, file_size, check_progress", [ - (0, parse_obj_as(ByteSize, "0"), False), - (1, parse_obj_as(ByteSize, "1mib"), False), - (2, parse_obj_as(ByteSize, "1mib"), False), - (1, parse_obj_as(ByteSize, "1Gib"), True), + (0, TypeAdapter(ByteSize).validate_python("0"), False), + (1, TypeAdapter(ByteSize).validate_python("1mib"), False), + (2, TypeAdapter(ByteSize).validate_python("1mib"), False), + (1, 
TypeAdapter(ByteSize).validate_python("1Gib"), True), pytest.param( - 4, parse_obj_as(ByteSize, "500Mib"), True, marks=pytest.mark.heavy_load + 4, + TypeAdapter(ByteSize).validate_python("500Mib"), + True, + marks=pytest.mark.heavy_load, ), pytest.param( - 100, parse_obj_as(ByteSize, "20mib"), True, marks=pytest.mark.heavy_load + 100, + TypeAdapter(ByteSize).validate_python("20mib"), + True, + marks=pytest.mark.heavy_load, ), ], ) @@ -372,7 +378,7 @@ async def test_overwrite_an_existing_file_and_sync_again( generated_file_names: set[str] = await _create_files_in_dir( dir_locally_created_files, 3, - parse_obj_as(ByteSize, "1kib"), + TypeAdapter(ByteSize).validate_python("1kib"), ) assert len(generated_file_names) > 0 diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py index 56f696bb46d..2e435d68a18 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py @@ -13,13 +13,14 @@ import pytest from aiohttp import ClientError from faker import Faker +from models_library.basic_types import IDStr from models_library.projects_nodes_io import ( LocationID, SimcoreS3DirectoryID, SimcoreS3FileID, ) from models_library.users import UserID -from pydantic import BaseModel, ByteSize, parse_obj_as +from pydantic import BaseModel, ByteSize, TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.parametrizations import byte_size_ids from servicelib.progress_bar import ProgressBarData @@ -70,7 +71,9 @@ def optional_sync_settings( def _file_size(size_str: str, **pytest_params): - return pytest.param(parse_obj_as(ByteSize, size_str), id=size_str, **pytest_params) + return pytest.param( + TypeAdapter(ByteSize).validate_python(size_str), id=size_str, **pytest_params + ) @pytest.mark.parametrize( @@ -99,7 +102,9 @@ async def test_valid_upload_download( file_path = create_file_of_size(file_size, "test.test") file_id = create_valid_file_uuid("", file_path) - async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: + async with ProgressBarData( + num_steps=2, description=IDStr(faker.pystr()) + ) as progress_bar: upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, store_id=s3_simcore_location, @@ -187,7 +192,9 @@ async def test_valid_upload_download_using_file_object( assert file_metadata.etag == e_tag download_folder = Path(tmpdir) / "downloads" - async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + async with ProgressBarData( + num_steps=1, description=IDStr(faker.pystr()) + ) as progress_bar: download_file_path = await filemanager.download_path_from_s3( user_id=user_id, store_id=s3_simcore_location, @@ -356,9 +363,9 @@ async def test_invalid_file_path( ) download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + with pytest.raises(exceptions.S3InvalidPathError): async with ProgressBarData( - num_steps=1, description=faker.pystr() + num_steps=1, description=IDStr(faker.pystr()) ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, @@ -412,7 +419,7 @@ async def test_errors_upon_invalid_file_identifiers( download_folder = Path(tmpdir) / "downloads" with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 async with ProgressBarData( - num_steps=1, description=faker.pystr() + 
num_steps=1, description=IDStr(faker.pystr()) ) as progress_bar: invalid_s3_path = SimcoreS3FileID("") await filemanager.download_path_from_s3( @@ -427,15 +434,17 @@ async def test_errors_upon_invalid_file_identifiers( aws_s3_cli_settings=optional_sync_settings.aws_s3_cli_settings, ) - with pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 + with pytest.raises(exceptions.S3InvalidPathError): async with ProgressBarData( - num_steps=1, description=faker.pystr() + num_steps=1, description=IDStr(faker.pystr()) ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, store_id=store, store_name=None, - s3_object=SimcoreS3FileID(f"{project_id}/{uuid4()}/invisible.txt"), + s3_object=TypeAdapter(SimcoreS3FileID).validate_python( + f"{project_id}/{uuid4()}/invisible.txt" + ), local_path=download_folder, io_log_redirect_cb=None, r_clone_settings=optional_sync_settings.r_clone_settings, @@ -469,9 +478,9 @@ async def test_invalid_store( ) download_folder = Path(tmpdir) / "downloads" - with pytest.raises(exceptions.S3InvalidStore): # noqa: PT012 + with pytest.raises(exceptions.S3InvalidStore): async with ProgressBarData( - num_steps=1, description=faker.pystr() + num_steps=1, description=IDStr(faker.pystr()) ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, @@ -662,7 +671,7 @@ async def test_upload_path_source_is_a_folder( directory_id = SimcoreS3DirectoryID.from_simcore_s3_object( f"{project_id}/{faker.uuid4()}/some-dir-in-node-root/" ) - s3_object = SimcoreS3FileID(directory_id) + s3_object = TypeAdapter(SimcoreS3FileID).validate_python(directory_id) upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, @@ -677,7 +686,9 @@ async def test_upload_path_source_is_a_folder( assert isinstance(upload_result, UploadedFolder) assert source_dir.exists() - async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: + async with ProgressBarData( + num_steps=1, description=IDStr(faker.pystr()) + ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, store_name=None, diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py index 64e7d34ef1f..5d728aad51d 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py @@ -15,8 +15,9 @@ import aiofiles import pytest from faker import Faker +from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport -from pydantic import AnyUrl, ByteSize, parse_obj_as +from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.file_utils import remove_directory from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather @@ -74,9 +75,8 @@ async def cleanup_bucket_after_test( def _fake_s3_link(r_clone_settings: RCloneSettings, s3_object: str) -> AnyUrl: - return parse_obj_as( - AnyUrl, - f"s3://{r_clone_settings.R_CLONE_S3.S3_BUCKET_NAME}/{urllib.parse.quote(s3_object)}", + return TypeAdapter(AnyUrl).validate_python( + f"s3://{r_clone_settings.R_CLONE_S3.S3_BUCKET_NAME}/{urllib.parse.quote(s3_object)}" ) @@ -99,7 +99,7 @@ async def _create_random_binary_file( file_path: Path, file_size: ByteSize, # NOTE: bigger files get created faster with bigger chunk_size - chunk_size: int = parse_obj_as(ByteSize, "1mib"), + chunk_size: int = 
TypeAdapter(ByteSize).validate_python("1mib"), ): async with aiofiles.open(file_path, mode="wb") as file: bytes_written = 0 @@ -160,7 +160,7 @@ async def _report_progress_upload(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_upload, - description=faker.pystr(), + description=IDStr(faker.pystr()), ) as progress_bar: await r_clone.sync_local_to_s3( r_clone_settings, @@ -187,13 +187,13 @@ async def _report_progress_download(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_download, - description=faker.pystr(), + description=IDStr(faker.pystr()), ) as progress_bar: await r_clone.sync_s3_to_local( r_clone_settings, progress_bar, local_directory_path=destination_dir, - download_s3_link=s3_directory_link, + download_s3_link=f"{s3_directory_link}", debug_logs=True, ) @@ -258,15 +258,21 @@ async def dir_downloaded_files_2(tmp_path: Path, faker: Faker) -> AsyncIterator[ @pytest.mark.parametrize( "file_count, file_size, check_progress", [ - (0, parse_obj_as(ByteSize, "0"), False), - (1, parse_obj_as(ByteSize, "1mib"), False), - (2, parse_obj_as(ByteSize, "1mib"), False), - (1, parse_obj_as(ByteSize, "1Gib"), True), + (0, TypeAdapter(ByteSize).validate_python("0"), False), + (1, TypeAdapter(ByteSize).validate_python("1mib"), False), + (2, TypeAdapter(ByteSize).validate_python("1mib"), False), + (1, TypeAdapter(ByteSize).validate_python("1Gib"), True), pytest.param( - 4, parse_obj_as(ByteSize, "500Mib"), True, marks=pytest.mark.heavy_load + 4, + TypeAdapter(ByteSize).validate_python("500Mib"), + True, + marks=pytest.mark.heavy_load, ), pytest.param( - 100, parse_obj_as(ByteSize, "20mib"), True, marks=pytest.mark.heavy_load + 100, + TypeAdapter(ByteSize).validate_python("20mib"), + True, + marks=pytest.mark.heavy_load, ), ], ) @@ -384,7 +390,7 @@ async def test_overwrite_an_existing_file_and_sync_again( generated_file_names: set[str] = await _create_files_in_dir( dir_locally_created_files, r_clone_settings.R_CLONE_OPTION_TRANSFERS * 3, - parse_obj_as(ByteSize, "1kib"), + TypeAdapter(ByteSize).validate_python("1kib"), ) assert len(generated_file_names) > 0 From 5b8432937ebbf45d717cc4aaccc91c51f2e2758a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 10:52:09 +0100 Subject: [PATCH 11/13] relative imports --- .../src/simcore_service_payments/services/payments_gateway.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/services/payments/src/simcore_service_payments/services/payments_gateway.py b/services/payments/src/simcore_service_payments/services/payments_gateway.py index a1ac9d0b061..f7989f6fa56 100644 --- a/services/payments/src/simcore_service_payments/services/payments_gateway.py +++ b/services/payments/src/simcore_service_payments/services/payments_gateway.py @@ -25,9 +25,6 @@ HealthMixinMixin, ) from servicelib.fastapi.httpx_utils import to_curl_command -from simcore_service_payments.models.schemas.acknowledgements import ( - AckPaymentWithPaymentMethod, -) from ..core.settings import ApplicationSettings from ..models.payments_gateway import ( @@ -41,6 +38,7 @@ PaymentMethodInitiated, PaymentMethodsBatch, ) +from ..models.schemas.acknowledgements import AckPaymentWithPaymentMethod _logger = logging.getLogger(__name__) From 233da09903f067275294a28e470caf08ad3118a1 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 1 Nov 2024 10:56:10 +0100 Subject: 
[PATCH 12/13] removed occurrences of parse_obj_as

---
 .../tests/test_utils_projects.py                 |  4 ++--
 .../rabbitmq/rpc_interfaces/agent/volumes.py     | 10 +++++++---
 .../rpc_interfaces/dynamic_sidecar/disk_usage.py | 16 ++++++++++++----
 .../src/osparc_gateway_server/backend/models.py  | 10 +++++-----
 4 files changed, 26 insertions(+), 14 deletions(-)

diff --git a/packages/postgres-database/tests/test_utils_projects.py b/packages/postgres-database/tests/test_utils_projects.py
index 9af318fca38..c0c00d271e6 100644
--- a/packages/postgres-database/tests/test_utils_projects.py
+++ b/packages/postgres-database/tests/test_utils_projects.py
@@ -12,7 +12,7 @@
 from aiopg.sa.connection import SAConnection
 from aiopg.sa.result import RowProxy
 from faker import Faker
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from simcore_postgres_database.models.projects import projects
 from simcore_postgres_database.utils_projects import (
     DBProjectNotFoundError,
@@ -69,7 +69,7 @@
 
     row = result.fetchone()
 
-    trashed_at = parse_obj_as(datetime | None, row.trashed_at)
+    trashed_at = TypeAdapter(datetime | None).validate_python(row.trashed_at)
 
     assert trashed_at == expected

diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py
index d414cd6b979..043898dcb30 100644
--- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py
@@ -4,7 +4,7 @@
 from models_library.projects_nodes_io import NodeID
 from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace
-from pydantic import NonNegativeInt, parse_obj_as
+from pydantic import NonNegativeInt, TypeAdapter
 from servicelib.logging_utils import log_decorator
 from servicelib.rabbitmq import RabbitMQRPCClient
 
@@ -29,7 +29,9 @@
                 "swarm_stack_name": swarm_stack_name,
             }
         ),
-        parse_obj_as(RPCMethodName, "remove_volumes_without_backup_for_service"),
+        TypeAdapter(RPCMethodName).validate_python(
+            "remove_volumes_without_backup_for_service"
+        ),
         node_id=node_id,
         timeout_s=_REQUEST_TIMEOUT,
     )
@@ -51,7 +53,9 @@
                 "swarm_stack_name": swarm_stack_name,
             }
         ),
-        parse_obj_as(RPCMethodName, "backup_and_remove_volumes_for_all_services"),
+        TypeAdapter(RPCMethodName).validate_python(
+            "backup_and_remove_volumes_for_all_services"
+        ),
         timeout_s=_REQUEST_TIMEOUT,
     )
     assert result is None  # nosec

diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/disk_usage.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/disk_usage.py
index 5938ad871ff..dbace2f1f4b 100644
--- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/disk_usage.py
+++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/disk_usage.py
@@ -1,14 +1,20 @@
 import logging
+from typing import Final
 
 from models_library.api_schemas_dynamic_sidecar.telemetry import DiskUsage
 from models_library.projects_nodes_io import NodeID
 from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace
-from pydantic import parse_obj_as
-from servicelib.logging_utils import log_decorator
-from servicelib.rabbitmq import RabbitMQRPCClient
+from pydantic import TypeAdapter
+
+from
....logging_utils import log_decorator
+from ... import RabbitMQRPCClient
 
 _logger = logging.getLogger(__name__)
 
+_UPDATE_DISK_USAGE: Final[RPCMethodName] = TypeAdapter(RPCMethodName).validate_python(
+    "update_disk_usage"
+)
+
 
 @log_decorator(_logger, level=logging.DEBUG)
 async def update_disk_usage(
@@ -21,6 +27,8 @@
         {"service": "dy-sidecar", "node_id": f"{node_id}"}
     )
     result = await rabbitmq_rpc_client.request(
-        rpc_namespace, parse_obj_as(RPCMethodName, "update_disk_usage"), usage=usage
+        rpc_namespace,
+        _UPDATE_DISK_USAGE,
+        usage=usage,
     )
     assert result is None  # nosec

diff --git a/services/osparc-gateway-server/src/osparc_gateway_server/backend/models.py b/services/osparc-gateway-server/src/osparc_gateway_server/backend/models.py
index 43e2240270d..9cdd2fc9edb 100644
--- a/services/osparc-gateway-server/src/osparc_gateway_server/backend/models.py
+++ b/services/osparc-gateway-server/src/osparc_gateway_server/backend/models.py
@@ -1,7 +1,8 @@
+from collections.abc import Mapping
 from ipaddress import IPv4Address
-from typing import Any, Mapping, Union
+from typing import Any, Union
 
-from pydantic import BaseModel, ByteSize, Field, PositiveFloat, parse_obj_as
+from pydantic import BaseModel, ByteSize, Field, PositiveFloat, TypeAdapter
 
 Hostname = str
 ResourceName = str
@@ -26,8 +27,7 @@
 def cluster_information_from_docker_nodes(
     nodes_list: list[Mapping[str, Any]]
 ) -> ClusterInformation:
-    return parse_obj_as(
-        ClusterInformation,
+    return TypeAdapter(ClusterInformation).validate_python(
         {
             node["Description"]["Hostname"]: {
                 "docker_node_id": node["ID"],
@@ -38,5 +38,5 @@
                 },
             }
             for node in nodes_list
-        },
+        }
     )

From ef80d782af9b90c413b7f4a82f2c0ef6c3ac581a Mon Sep 17 00:00:00 2001
From: sanderegg <35365065+sanderegg@users.noreply.github.com>
Date: Fri, 1 Nov 2024 10:57:30 +0100
Subject: [PATCH 13/13] removed occurrences of construct()

---
 packages/models-library/src/models_library/docker.py     | 2 +-
 .../src/simcore_service_dynamic_sidecar/core/rabbitmq.py | 4 ++--
 .../resource_tracker_utils.py                            | 6 +++---
 .../services/resource_tracker_service_runs.py            | 4 ++--
 4 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/packages/models-library/src/models_library/docker.py b/packages/models-library/src/models_library/docker.py
index b16c4ae13cc..b8134b3ec73 100644
--- a/packages/models-library/src/models_library/docker.py
+++ b/packages/models-library/src/models_library/docker.py
@@ -75,7 +75,7 @@ def to_simcore_runtime_docker_label_key(key: str) -> DockerLabelKey:
 class StandardSimcoreDockerLabels(BaseModel):
     """
     Represents the standard label on oSparc created containers (not yet services)
-    In order to create this object in code, please use construct() method!
+    In order to create this object in code, please use model_construct() method!
""" user_id: UserID = Field(..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}user-id") # type: ignore[literal-required] diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py index f33a43d33fb..88c77c84997 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py @@ -46,7 +46,7 @@ async def post_log_message( app: FastAPI, log: LogMessageStr, *, log_level: LogLevelInt ) -> None: app_settings: ApplicationSettings = app.state.settings - message = LoggerRabbitMessage.construct( + message = LoggerRabbitMessage.model_construct( node_id=app_settings.DY_SIDECAR_NODE_ID, user_id=app_settings.DY_SIDECAR_USER_ID, project_id=app_settings.DY_SIDECAR_PROJECT_ID, @@ -61,7 +61,7 @@ async def post_progress_message( app: FastAPI, progress_type: ProgressType, report: ProgressReport ) -> None: app_settings: ApplicationSettings = app.state.settings - message = ProgressRabbitMessageNode.construct( + message = ProgressRabbitMessageNode.model_construct( node_id=app_settings.DY_SIDECAR_NODE_ID, user_id=app_settings.DY_SIDECAR_USER_ID, project_id=app_settings.DY_SIDECAR_PROJECT_ID, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_utils.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_utils.py index 4466fc5e7de..73aa7416244 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_utils.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/resource_tracker_utils.py @@ -1,6 +1,6 @@ import asyncio import logging -from datetime import datetime, timezone +from datetime import UTC, datetime from decimal import Decimal from models_library.api_schemas_resource_usage_tracker.credit_transactions import ( @@ -41,9 +41,9 @@ async def sum_credit_transactions_and_publish_to_rabbitmq( wallet_id, ) ) - publish_message = WalletCreditsMessage.construct( + publish_message = WalletCreditsMessage.model_construct( wallet_id=wallet_id, - created_at=datetime.now(tz=timezone.utc), + created_at=datetime.now(tz=UTC), credits=wallet_total_credits.available_osparc_credits, product_name=product_name, ) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py index 8e7719927bc..31171ea4104 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py @@ -116,7 +116,7 @@ async def list_service_runs( service_runs_api_model: list[ServiceRunGet] = [] for service in service_runs_db_model: service_runs_api_model.append( - ServiceRunGet.construct( + ServiceRunGet.model_construct( service_run_id=service.service_run_id, wallet_id=service.wallet_id, wallet_name=service.wallet_name, @@ -218,7 +218,7 @@ async def get_osparc_credits_aggregated_usages_page( output_api_model: list[OsparcCreditsAggregatedByServiceGet] = [] for item in output_list_db: output_api_model.append( - OsparcCreditsAggregatedByServiceGet.construct( + OsparcCreditsAggregatedByServiceGet.model_construct( 
osparc_credits=item.osparc_credits, service_key=item.service_key, running_time_in_hours=item.running_time_in_hours,