♻️ Pydantic V2 migration: various fixes #6647

2 changes: 1 addition & 1 deletion packages/aws-library/tests/test_s3_client.py
@@ -696,7 +696,7 @@ async def test_create_single_presigned_download_link(

dest_file = tmp_path / faker.file_name()
async with ClientSession() as session:
response = await session.get(download_url)
response = await session.get(f"{download_url}")
response.raise_for_status()
with dest_file.open("wb") as fp:
fp.write(await response.read())
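Note on this change: in Pydantic v2, AnyUrl no longer subclasses str, so a presigned link typed as AnyUrl has to be converted explicitly before it reaches aiohttp. A minimal sketch of the pattern; the helper name and URL below are illustrative, not from the PR:

```python
from aiohttp import ClientSession
from pydantic import AnyUrl, TypeAdapter


async def fetch(download_url: AnyUrl) -> bytes:
    async with ClientSession() as session:
        # AnyUrl is not a str in Pydantic v2; f"{...}" (or str(...)) converts it
        # to the plain string aiohttp expects
        response = await session.get(f"{download_url}")
        response.raise_for_status()
        return await response.read()


url = TypeAdapter(AnyUrl).validate_python("https://example.com/file.bin")
# data = asyncio.run(fetch(url))
```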
@@ -1,5 +1,5 @@
import logging
from typing import Final, cast
from typing import Final

from models_library.api_schemas_resource_usage_tracker import (
RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
@@ -115,5 +115,5 @@ async def export_service_runs(
filters=filters,
timeout_s=_DEFAULT_TIMEOUT_S,
)
assert cast(AnyUrl, isinstance(result, AnyUrl)) # nosec
assert isinstance(result, AnyUrl) # nosec
return result
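For context on the hunk above: the removed line cast() the result of isinstance() — a bool — to AnyUrl, which does nothing at runtime and misleads the type checker; asserting isinstance() directly both checks at runtime and narrows the static type. A small sketch of the narrowing pattern (the function name is illustrative only):

```python
from pydantic import AnyUrl, TypeAdapter


def ensure_url(result: object) -> AnyUrl:
    assert isinstance(result, AnyUrl)  # nosec - runtime check and static narrowing
    return result


print(ensure_url(TypeAdapter(AnyUrl).validate_python("https://example.com/")))
```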
43 changes: 34 additions & 9 deletions packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py
@@ -1,6 +1,7 @@
import logging
import traceback
from abc import ABC, abstractmethod
from asyncio import CancelledError
from asyncio import CancelledError, Task
from collections.abc import Callable, Coroutine
from pathlib import Path
from typing import Any
@@ -12,6 +13,7 @@
from models_library.services_types import ServicePortKey
from models_library.users import UserID
from pydantic import BaseModel, ConfigDict, Field, ValidationError
from pydantic_core import InitErrorDetails
from servicelib.progress_bar import ProgressBarData
from servicelib.utils import logged_gather
from settings_library.aws_s3_cli import AwsS3CliSettings
@@ -28,6 +30,28 @@
log = logging.getLogger(__name__)


# -> @GitHK this looks very dangerous, using a lot of protected stuff, just checking the number of ignores shows it's a bad idea...
def _format_error(task: Task) -> str:
# pylint:disable=protected-access
assert task._exception # nosec # noqa: SLF001
error_list = traceback.format_exception(
type(task._exception), # noqa: SLF001
task._exception, # noqa: SLF001
task._exception.__traceback__, # noqa: SLF001
)
return "\n".join(error_list)


def _get_error_details(task: Task, port_key: str) -> InitErrorDetails:
# pylint:disable=protected-access
return InitErrorDetails(
type="value_error",
loc=(f"{port_key}",),
input=_format_error(task),
ctx={"error": task._exception}, # noqa: SLF001
)


class OutputsCallbacks(ABC):
@abstractmethod
async def aborted(self, key: ServicePortKey) -> None:
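The new helpers above build pydantic_core.InitErrorDetails entries for failed port tasks. In Pydantic v2, such entries are typically turned into an exception with ValidationError.from_exception_data; a minimal sketch with hypothetical values (the PR's own aggregation step is not visible in this hunk):

```python
from pydantic_core import InitErrorDetails, ValidationError

details = InitErrorDetails(
    type="value_error",
    loc=("output_1",),  # hypothetical port key
    input="Traceback (most recent call last): ...",
    ctx={"error": RuntimeError("upload failed")},
)

try:
    raise ValidationError.from_exception_data("Nodeports", [details])
except ValidationError as err:
    print(err.errors())  # one structured error per failed port
```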
@@ -72,9 +96,9 @@ def __init__(self, **data: Any):

# let's pass ourselves down
for input_key in self.internal_inputs:
self.internal_inputs[input_key]._node_ports = self
self.internal_inputs[input_key]._node_ports = self # noqa: SLF001
for output_key in self.internal_outputs:
self.internal_outputs[output_key]._node_ports = self
self.internal_outputs[output_key]._node_ports = self # noqa: SLF001

@property
async def inputs(self) -> InputsList:
@@ -132,10 +156,11 @@ async def set(

async def set_file_by_keymap(self, item_value: Path) -> None:
for output in (await self.outputs).values():
if is_file_type(output.property_type) and output.file_to_key_map:
if item_value.name in output.file_to_key_map:
await output.set(item_value)
return
if (is_file_type(output.property_type) and output.file_to_key_map) and (
item_value.name in output.file_to_key_map
):
await output.set(item_value)
return
raise PortNotFound(msg=f"output port for item {item_value} not found")

async def _node_ports_creator_cb(self, node_uuid: NodeIDStr) -> type["Nodeports"]:
@@ -152,9 +177,9 @@ async def _auto_update_from_db(self) -> None:
# let's pass ourselves down
# pylint: disable=protected-access
for input_key in self.internal_inputs:
self.internal_inputs[input_key]._node_ports = self
self.internal_inputs[input_key]._node_ports = self # noqa: SLF001
for output_key in self.internal_outputs:
self.internal_outputs[output_key]._node_ports = self
self.internal_outputs[output_key]._node_ports = self # noqa: SLF001

async def set_multiple(
self,
17 changes: 15 additions & 2 deletions services/dynamic-scheduler/requirements/_base.txt
@@ -102,7 +102,6 @@ fastapi==0.115.2
# via
# -r requirements/../../../packages/service-library/requirements/_fastapi.in
# -r requirements/_base.in
# prometheus-fastapi-instrumentator
faststream==0.5.28
# via -r requirements/../../../packages/service-library/requirements/_base.in
frozenlist==1.4.1
@@ -255,8 +254,15 @@ orjson==3.10.7
# -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../requirements/constraints.txt
# -r requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
packaging==24.1
# via -r requirements/_base.in
pamqp==3.3.0
@@ -314,8 +320,15 @@ pydantic-core==2.23.4
# via pydantic
pydantic-extra-types==2.9.0
# via
# -r requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
# -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in
pydantic-settings==2.5.2
# via
# -r requirements/../../../packages/models-library/requirements/_base.in
@@ -462,7 +475,7 @@ typing-extensions==4.12.2
# typer
u-msgpack-python==2.8.0
# via -r requirements/_base.in
urllib3==2.2.2
urllib3==2.2.3
# via
# -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
@@ -89,7 +89,7 @@ def __setattr__(self, name, value):

last_state_change: float = Field(
default_factory=lambda: arrow.utcnow().timestamp(),
metadata={"description": "keeps track when the current_state was last updated"},
description="keeps track when the current_state was last updated",
)

#############################
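The hunk above passes the description directly to Field(description=...) instead of wrapping it in a metadata dict; description is the documented Field parameter and ends up in the generated JSON schema. A minimal sketch with a hypothetical model name:

```python
import arrow
from pydantic import BaseModel, Field


class TrackedState(BaseModel):  # hypothetical name, not the PR's model
    last_state_change: float = Field(
        default_factory=lambda: arrow.utcnow().timestamp(),
        description="keeps track when the current_state was last updated",
    )


schema = TrackedState.model_json_schema()
print(schema["properties"]["last_state_change"]["description"])
```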
@@ -7,7 +7,7 @@
from typing import NamedTuple

import psutil
from models_library.error_codes import create_error_code
from common_library.error_codes import create_error_code
from servicelib.logging_errors import create_troubleshotting_log_kwargs

from ..modules.mounted_fs import MountedVolumes
@@ -74,7 +74,7 @@ async def async_command(
try:
stdout, _ = await asyncio.wait_for(proc.communicate(), timeout=timeout)

except asyncio.TimeoutError:
except TimeoutError:
proc.terminate()
_close_transport(proc)

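Two things in this file: the error-code helper now comes from common_library, and the except clause catches the builtin TimeoutError. Since Python 3.11, asyncio.TimeoutError is an alias of the builtin, so the builtin also covers what asyncio.wait_for raises. A small self-contained sketch, assuming Python 3.11+ (the command and timeout are illustrative):

```python
import asyncio

assert TimeoutError is asyncio.TimeoutError  # alias since Python 3.11


async def run_with_timeout(cmd: str, timeout: float) -> bytes | None:
    proc = await asyncio.create_subprocess_shell(cmd, stdout=asyncio.subprocess.PIPE)
    try:
        stdout, _ = await asyncio.wait_for(proc.communicate(), timeout=timeout)
        return stdout
    except TimeoutError:  # also catches asyncio.wait_for timeouts
        proc.terminate()
        return None


print(asyncio.run(run_with_timeout("echo hello", timeout=5.0)))
```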
@@ -6,6 +6,7 @@
from datetime import timedelta
from functools import partial

from common_library.errors_classes import OsparcErrorMixin
from fastapi import FastAPI
from models_library.basic_types import IDStr
from models_library.rabbitmq_messages import ProgressType
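This hunk only adds the OsparcErrorMixin import from common_library; its usage is not shown here. Assuming the mixin keeps the msg_template pattern used elsewhere in the repo — worth verifying against common_library.errors_classes — a typical declaration looks roughly like:

```python
from common_library.errors_classes import OsparcErrorMixin


class PortUploadTimeoutError(OsparcErrorMixin, TimeoutError):
    # msg_template interpolation is an assumption about the mixin's API
    msg_template = "upload of port {port_key} timed out"


# raise PortUploadTimeoutError(port_key="output_1")
```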
@@ -24,6 +24,7 @@
from models_library.services import ServiceKey, ServiceVersion
from models_library.users import UserID
from models_library.wallets import WalletID
from pydantic import AnyUrl
from servicelib.rabbitmq import RPCRouter
from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import (
CustomResourceUsageTrackerError,
@@ -78,7 +79,7 @@ async def export_service_runs(
access_all_wallet_usage: bool = False,
order_by: OrderBy | None = None,
filters: ServiceResourceUsagesFilters | None = None,
) -> str:
) -> AnyUrl:
app_settings: ApplicationSettings = app.state.settings
s3_settings = app_settings.RESOURCE_USAGE_TRACKER_S3
assert s3_settings # nosec
@@ -1,4 +1,4 @@
from datetime import datetime, timedelta, timezone
from datetime import UTC, datetime, timedelta, timezone

import shortuuid
from aws_library.s3 import SimcoreS3API
@@ -18,7 +18,7 @@
from models_library.rest_ordering import OrderBy
from models_library.users import UserID
from models_library.wallets import WalletID
from pydantic import PositiveInt, TypeAdapter
from pydantic import AnyUrl, PositiveInt, TypeAdapter
from servicelib.rabbitmq.rpc_interfaces.resource_usage_tracker.errors import (
CustomResourceUsageTrackerError,
)
@@ -144,6 +144,7 @@ async def list_service_runs(

async def export_service_runs(
s3_client: SimcoreS3API,
*,
bucket_name: str,
s3_region: str,
user_id: UserID,
@@ -153,15 +154,17 @@
access_all_wallet_usage: bool = False,
order_by: OrderBy | None = None,
filters: ServiceResourceUsagesFilters | None = None,
) -> str:
) -> AnyUrl:
started_from = filters.started_at.from_ if filters else None
started_until = filters.started_at.until if filters else None

# Create S3 key name
s3_bucket_name = TypeAdapter(S3BucketName).validate_python(bucket_name)
# NOTE: su stands for "service usage"
file_name = f"su_{shortuuid.uuid()}.csv"
s3_object_key = f"resource-usage-tracker-service-runs/{datetime.now(tz=timezone.utc).date()}/{file_name}"
s3_object_key = (
f"resource-usage-tracker-service-runs/{datetime.now(tz=UTC).date()}/{file_name}"
)

# Export CSV to S3
await resource_tracker_repo.export_service_runs_table_to_s3(
@@ -177,12 +180,11 @@
)

# Create presigned S3 link
generated_url = await s3_client.create_single_presigned_download_link(
return await s3_client.create_single_presigned_download_link(
bucket=s3_bucket_name,
object_key=s3_object_key,
expiration_secs=_PRESIGNED_LINK_EXPIRATION_SEC,
)
return f"{generated_url}"


async def get_osparc_credits_aggregated_usages_page(
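Three small changes in this service function: arguments after s3_client become keyword-only (the bare *), datetime.now(tz=UTC) uses the Python 3.11 UTC alias, and the presigned link is now returned as-is to match the new -> AnyUrl annotation instead of being stringified. A toy sketch of the first two; function and parameter names are illustrative, not the PR's:

```python
from datetime import UTC, datetime, timezone

assert UTC is timezone.utc  # UTC is just an alias (Python 3.11+)


def build_s3_key(*, prefix: str, file_name: str) -> str:
    # everything after the bare `*` must be passed by keyword
    return f"{prefix}/{datetime.now(tz=UTC).date()}/{file_name}"


print(build_s3_key(prefix="resource-usage-tracker-service-runs", file_name="su_example.csv"))
# build_s3_key("resource-usage-tracker-service-runs", "su_example.csv")  -> TypeError
```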
@@ -1,3 +1,8 @@
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
# pylint:disable=too-many-arguments

import os
from unittest.mock import Mock

@@ -25,25 +30,21 @@

@pytest.fixture
async def mocked_export(mocker: MockerFixture):
mock_export = mocker.patch(
return mocker.patch(
"simcore_service_resource_usage_tracker.services.resource_tracker_service_runs.ResourceTrackerRepository.export_service_runs_table_to_s3",
autospec=True,
)

return mock_export


@pytest.fixture
async def mocked_presigned_link(mocker: MockerFixture):
mock_presigned_link = mocker.patch(
return mocker.patch(
"simcore_service_resource_usage_tracker.services.resource_tracker_service_runs.SimcoreS3API.create_single_presigned_download_link",
return_value=TypeAdapter(AnyUrl).validate_python(
"https://www.testing.com/",
),
)

return mock_presigned_link


@pytest.fixture
async def enable_resource_usage_tracker_s3(
@@ -76,6 +77,6 @@ async def test_rpc_list_service_runs_which_was_billed(
user_id=_USER_ID,
product_name="osparc",
)
assert isinstance(download_url, AnyUrl)
assert isinstance(download_url, AnyUrl) # nosec
assert mocked_export.called
assert mocked_presigned_link.called
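The fixtures now return the mocker.patch result directly, and the presigned-link mock is built with TypeAdapter, Pydantic v2's replacement for v1's parse_obj_as. A minimal sketch of that validation pattern:

```python
from pydantic import AnyUrl, TypeAdapter

adapter = TypeAdapter(AnyUrl)  # replaces Pydantic v1's parse_obj_as
url = adapter.validate_python("https://www.testing.com/")

assert isinstance(url, AnyUrl)
assert not isinstance(url, str)  # AnyUrl is no longer a str subclass in v2
assert f"{url}" == "https://www.testing.com/"
```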