diff --git a/README.md b/README.md
index 3b6ee6d953..c9bd94fee6 100644
--- a/README.md
+++ b/README.md
@@ -151,9 +151,9 @@ CREATE USER palace with password 'test';
 grant all privileges on database circ to palace;
 ```
 
-#### Environment variables
+### Environment variables
 
-##### Database
+#### Database
 
 To let the application know which database to use, set the `SIMPLIFIED_PRODUCTION_DATABASE` environment variable.
 
@@ -161,7 +161,32 @@ To let the application know which database to use, set the `SIMPLIFIED_PRODUCTIO
 export SIMPLIFIED_PRODUCTION_DATABASE="postgresql://palace:test@localhost:5432/circ"
 ```
 
-##### Patron `Basic Token` authentication
+#### Storage Service
+
+The application optionally uses an S3-compatible storage service to store files. To configure the application to use
+a storage service, you can set the following environment variables:
+
+- `PALACE_STORAGE_PUBLIC_ACCESS_BUCKET`: Required if you want to use the storage service to serve files directly to
+  users. This is the name of the bucket that will be used to serve files. This bucket should be configured to allow
+  public access to the files.
+- `PALACE_STORAGE_ANALYTICS_BUCKET`: Required if you want to use the storage service to store analytics data.
+- `PALACE_STORAGE_ACCESS_KEY`: The access key (optional).
+  - If this key is set, it will be passed to boto3 when connecting to the storage service.
+  - If it is not set, boto3 will attempt to find credentials as outlined in its
+    [documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials).
+- `PALACE_STORAGE_SECRET_KEY`: The secret key (optional).
+- `PALACE_STORAGE_REGION`: The AWS region of the storage service (optional).
+- `PALACE_STORAGE_ENDPOINT_URL`: The endpoint of the storage service (optional). This is used if you are using an
+  S3-compatible storage service like [MinIO](https://min.io/).
+- `PALACE_STORAGE_URL_TEMPLATE`: The URL template to use when generating URLs for files stored in the storage service
+  (optional).
+  - The default value is `https://{bucket}.s3.{region}.amazonaws.com/{key}`.
+  - The following variables can be used in the template:
+    - `{bucket}`: The name of the bucket.
+    - `{key}`: The key of the file.
+    - `{region}`: The region of the storage service.
+
+#### Patron `Basic Token` authentication
 
 Enables/disables patron "basic token" authentication through setting the designated environment variable to any
 (case-insensitive) value of "true"/"yes"/"on"/"1" or "false"/"no"/"off"/"0", respectively.
 
@@ -172,7 +197,7 @@ If the value is the empty string or the variable is not present in the environme
 export SIMPLIFIED_ENABLE_BASIC_TOKEN_AUTH=true
 ```
 
-##### Firebase Cloud Messaging
+#### Firebase Cloud Messaging
 
 For Firebase Cloud Messaging (FCM) support (e.g., for notifications), `one` (and only one) of the following should be set:
 - `SIMPLIFIED_FCM_CREDENTIALS_JSON` - the JSON-format Google Cloud Platform (GCP) service account key or
 - `SIMPLIFIED_FCM_CREDENTIALS_FILE` - the name of the file containing that key.
 
@@ -191,7 +216,7 @@ export SIMPLIFIED_FCM_CREDENTIALS_FILE="/opt/credentials/fcm_credentials.json"
 
 The FCM credentials can be downloaded once a Google Service account has been created.
diff --git a/api/admin/config.py b/api/admin/config.py
index bf64393caf..a8ddac7736 100644
--- a/api/admin/config.py
+++ b/api/admin/config.py
@@ -18,7 +18,7 @@ class Configuration:
     APP_NAME = "Palace Collection Manager"
     PACKAGE_NAME = "@thepalaceproject/circulation-admin"
-    PACKAGE_VERSION = "1.8.0"
+    PACKAGE_VERSION = "1.9.0"
 
     STATIC_ASSETS = {
         "admin_js": "circulation-admin.js",
diff --git a/api/admin/controller/__init__.py b/api/admin/controller/__init__.py
index 32e82b095f..9f8912d6a4 100644
--- a/api/admin/controller/__init__.py
+++ b/api/admin/controller/__init__.py
@@ -50,7 +50,6 @@ def setup_admin_controllers(manager: CirculationManager):
     from api.admin.controller.sitewide_settings import (
         SitewideConfigurationSettingsController,
     )
-    from api.admin.controller.storage_services import StorageServicesController
     from api.admin.controller.timestamps import TimestampsController
     from api.admin.controller.view import ViewController
     from api.admin.controller.work_editor import WorkController
@@ -100,7 +99,6 @@ def setup_admin_controllers(manager: CirculationManager):
         SearchServiceSelfTestsController(manager)
     )
     manager.admin_search_services_controller = SearchServicesController(manager)
-    manager.admin_storage_services_controller = StorageServicesController(manager)
     manager.admin_catalog_services_controller = CatalogServicesController(manager)
     manager.admin_announcement_service = AnnouncementSettings(manager)
     manager.admin_search_controller = AdminSearchController(manager)
diff --git a/api/admin/controller/analytics_services.py b/api/admin/controller/analytics_services.py
index f966fcdb78..6025813506 100644
--- a/api/admin/controller/analytics_services.py
+++ b/api/admin/controller/analytics_services.py
@@ -6,8 +6,7 @@
 from api.google_analytics_provider import GoogleAnalyticsProvider
 from api.s3_analytics_provider import S3AnalyticsProvider
 from core.local_analytics_provider import LocalAnalyticsProvider
-from core.model import ExternalIntegration, ExternalIntegrationLink
-from core.s3 import S3UploaderConfiguration
+from core.model import ExternalIntegration
 from core.util import first_or_default
 from core.util.problem_detail import ProblemDetail
 
@@ -33,11 +32,6 @@ def update_protocol_settings(self):
             ]
         )
 
-        if s3_analytics_provider:
-            s3_analytics_provider[
-                "settings"
-            ] = S3AnalyticsProvider.get_storage_settings(self._db)
-
     def process_analytics_services(self):
         if flask.request.method == "GET":
             return self.process_get()
@@ -101,14 +95,6 @@ def process_post(self):
 
         service.name = name
 
-        external_integration_link = self._set_storage_external_integration_link(
-            service,
-            ExternalIntegrationLink.ANALYTICS,
-            S3UploaderConfiguration.ANALYTICS_BUCKET_KEY,
-        )
-        if isinstance(external_integration_link, ProblemDetail):
-            return external_integration_link
-
         if is_new:
             return
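+For reference, the `SIMPLIFIED_FCM_CREDENTIALS_JSON` value is the key's JSON content passed inline. The value below
+is a placeholder; a real service account key is a larger JSON document downloaded from GCP:
+
+```sh
+export SIMPLIFIED_FCM_CREDENTIALS_JSON='{"type": "service_account", "project_id": "my-project", ...}'
+```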
More details in the [FCM documentation](https://firebase.google.com/docs/admin/setup#set-up-project-and-service-account) -### Email sending +#### Email To use the features that require sending emails, for example to reset the password for logged-out users, you will need to have a working SMTP server and set some environment variables: @@ -204,7 +229,7 @@ export SIMPLIFIED_MAIL_PASSWORD=password export SIMPLIFIED_MAIL_SENDER=sender@example.com ``` -### Running the Application +## Running the Application As mentioned in the [pyenv](#pyenv) section, the `poetry` tool should be executed under a virtual environment in order to guarantee that it will use the Python version you expect. To use a particular Python version, diff --git a/api/admin/config.py b/api/admin/config.py index bf64393caf..a8ddac7736 100644 --- a/api/admin/config.py +++ b/api/admin/config.py @@ -18,7 +18,7 @@ class Configuration: APP_NAME = "Palace Collection Manager" PACKAGE_NAME = "@thepalaceproject/circulation-admin" - PACKAGE_VERSION = "1.8.0" + PACKAGE_VERSION = "1.9.0" STATIC_ASSETS = { "admin_js": "circulation-admin.js", diff --git a/api/admin/controller/__init__.py b/api/admin/controller/__init__.py index 32e82b095f..9f8912d6a4 100644 --- a/api/admin/controller/__init__.py +++ b/api/admin/controller/__init__.py @@ -50,7 +50,6 @@ def setup_admin_controllers(manager: CirculationManager): from api.admin.controller.sitewide_settings import ( SitewideConfigurationSettingsController, ) - from api.admin.controller.storage_services import StorageServicesController from api.admin.controller.timestamps import TimestampsController from api.admin.controller.view import ViewController from api.admin.controller.work_editor import WorkController @@ -100,7 +99,6 @@ def setup_admin_controllers(manager: CirculationManager): SearchServiceSelfTestsController(manager) ) manager.admin_search_services_controller = SearchServicesController(manager) - manager.admin_storage_services_controller = StorageServicesController(manager) manager.admin_catalog_services_controller = CatalogServicesController(manager) manager.admin_announcement_service = AnnouncementSettings(manager) manager.admin_search_controller = AdminSearchController(manager) diff --git a/api/admin/controller/analytics_services.py b/api/admin/controller/analytics_services.py index f966fcdb78..6025813506 100644 --- a/api/admin/controller/analytics_services.py +++ b/api/admin/controller/analytics_services.py @@ -6,8 +6,7 @@ from api.google_analytics_provider import GoogleAnalyticsProvider from api.s3_analytics_provider import S3AnalyticsProvider from core.local_analytics_provider import LocalAnalyticsProvider -from core.model import ExternalIntegration, ExternalIntegrationLink -from core.s3 import S3UploaderConfiguration +from core.model import ExternalIntegration from core.util import first_or_default from core.util.problem_detail import ProblemDetail @@ -33,11 +32,6 @@ def update_protocol_settings(self): ] ) - if s3_analytics_provider: - s3_analytics_provider[ - "settings" - ] = S3AnalyticsProvider.get_storage_settings(self._db) - def process_analytics_services(self): if flask.request.method == "GET": return self.process_get() @@ -101,14 +95,6 @@ def process_post(self): service.name = name - external_integration_link = self._set_storage_external_integration_link( - service, - ExternalIntegrationLink.ANALYTICS, - S3UploaderConfiguration.ANALYTICS_BUCKET_KEY, - ) - if isinstance(external_integration_link, ProblemDetail): - return external_integration_link - if is_new: return 
Response(str(service.id), 201) else: diff --git a/api/admin/controller/catalog_services.py b/api/admin/controller/catalog_services.py index 1405aa798c..294a8358c6 100644 --- a/api/admin/controller/catalog_services.py +++ b/api/admin/controller/catalog_services.py @@ -6,15 +6,12 @@ from api.admin.problem_details import ( CANNOT_CHANGE_PROTOCOL, INTEGRATION_NAME_ALREADY_IN_USE, - MISSING_INTEGRATION, MISSING_SERVICE, MULTIPLE_SERVICES_FOR_LIBRARY, UNKNOWN_PROTOCOL, ) from core.marc import MARCExporter -from core.model import ExternalIntegration, get_one, get_one_or_create -from core.model.configuration import ExternalIntegrationLink -from core.s3 import S3UploaderConfiguration +from core.model import ExternalIntegration, get_one from core.util.problem_detail import ProblemDetail @@ -25,10 +22,6 @@ def __init__(self, manager): self.protocols = self._get_integration_protocols( service_apis, protocol_name_attr="NAME" ) - self.update_protocol_settings() - - def update_protocol_settings(self): - self.protocols[0]["settings"] = [MARCExporter.get_storage_settings(self._db)] def process_catalog_services(self): self.require_system_admin() @@ -42,7 +35,6 @@ def process_get(self): services = self._get_integration_info( ExternalIntegration.CATALOG_GOAL, self.protocols ) - self.update_protocol_settings() return dict( catalog_services=services, protocols=self.protocols, @@ -91,10 +83,6 @@ def process_post(self): if isinstance(result, ProblemDetail): return result - external_integration_link = self._set_external_integration_link(service) - if isinstance(external_integration_link, ProblemDetail): - return external_integration_link - library_error = self.check_libraries(service) if library_error: self._db.rollback() @@ -105,39 +93,6 @@ def process_post(self): else: return Response(str(service.id), 200) - def _set_external_integration_link(self, service): - """Either set or delete the external integration link between the - service and the storage integration. - """ - mirror_integration_id = flask.request.form.get("mirror_integration_id") - - # If no storage integration was selected, then delete the existing - # external integration link. 
- current_integration_link, ignore = get_one_or_create( - self._db, - ExternalIntegrationLink, - library_id=None, - external_integration_id=service.id, - purpose=ExternalIntegrationLink.MARC, - ) - - if mirror_integration_id == self.NO_MIRROR_INTEGRATION: - if current_integration_link: - self._db.delete(current_integration_link) - else: - storage_integration = get_one( - self._db, ExternalIntegration, id=mirror_integration_id - ) - # Only get storage integrations that have a MARC file option set - if ( - not storage_integration - or not storage_integration.setting( - S3UploaderConfiguration.MARC_BUCKET_KEY - ).value - ): - return MISSING_INTEGRATION - current_integration_link.other_integration_id = storage_integration.id - def validate_form_fields(self, protocol): """Verify that the protocol which the user has selected is in the list of recognized protocol options.""" diff --git a/api/admin/controller/storage_services.py b/api/admin/controller/storage_services.py deleted file mode 100644 index 5ef8cf681c..0000000000 --- a/api/admin/controller/storage_services.py +++ /dev/null @@ -1,72 +0,0 @@ -import flask -from flask import Response - -from api.admin.controller.settings import SettingsController -from api.admin.problem_details import CANNOT_CHANGE_PROTOCOL, MISSING_SERVICE - -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core.mirror import MirrorUploader # noqa: autoflake -from core.model import ExternalIntegration, get_one -from core.util.problem_detail import ProblemDetail - - -class StorageServicesController(SettingsController): - def __init__(self, manager): - super().__init__(manager) - self.goal = ExternalIntegration.STORAGE_GOAL - self.protocols = self._get_integration_protocols( - list(MirrorUploader.IMPLEMENTATION_REGISTRY.values()), - protocol_name_attr="NAME", - ) - - def process_services(self): - if flask.request.method == "GET": - return self.process_get() - else: - return self.process_post() - - def process_get(self): - services = self._get_integration_info(self.goal, self.protocols) - return dict(storage_services=services, protocols=self.protocols) - - def process_post(self): - protocol = flask.request.form.get("protocol") - name = flask.request.form.get("name") - is_new = False - protocol_error = self.validate_protocol() - if protocol_error: - return protocol_error - - id = flask.request.form.get("id") - if id: - # Find an existing service to edit - storage_service = get_one( - self._db, ExternalIntegration, id=id, goal=self.goal - ) - if not storage_service: - return MISSING_SERVICE - if protocol != storage_service.protocol: - return CANNOT_CHANGE_PROTOCOL - else: - # Create a new service - storage_service, is_new = self._create_integration( - self.protocols, protocol, self.goal - ) - if isinstance(storage_service, ProblemDetail): - self._db.rollback() - return storage_service - - protocol_error = self.set_protocols(storage_service, protocol, self.protocols) - - if protocol_error: - self._db.rollback() - return protocol_error - storage_service.name = name - - if is_new: - return Response(str(storage_service.id), 201) - else: - return Response(str(storage_service.id), 200) - - def process_delete(self, service_id): - return self._delete_integration(service_id, ExternalIntegration.STORAGE_GOAL) diff --git a/api/admin/routes.py b/api/admin/routes.py index 41c218cc98..900096adbb 100644 --- a/api/admin/routes.py +++ b/api/admin/routes.py @@ -493,22 +493,6 @@ def search_service_self_tests(identifier): ) 
-@app.route("/admin/storage_services", methods=["GET", "POST"])
-@returns_json_or_response_or_problem_detail
-@requires_admin
-@requires_csrf_token
-def storage_services():
-    return app.manager.admin_storage_services_controller.process_services()
-
-
-@app.route("/admin/storage_service/<service_id>", methods=["DELETE"])
-@returns_json_or_response_or_problem_detail
-@requires_admin
-@requires_csrf_token
-def storage_service(service_id):
-    return app.manager.admin_storage_services_controller.process_delete(service_id)
-
-
 @app.route("/admin/catalog_services", methods=["GET", "POST"])
 @returns_json_or_response_or_problem_detail
 @requires_admin
diff --git a/api/app.py b/api/app.py
index 018dda07d0..d380beed10 100644
--- a/api/app.py
+++ b/api/app.py
@@ -17,6 +17,7 @@
     SessionManager,
     pg_advisory_lock,
 )
+from core.service.container import container_instance
 from core.util import LanguageCodes
 from core.util.cache import CachedData
 from scripts import InstanceInitializationScript
@@ -72,8 +73,9 @@ def initialize_circulation_manager():
         pass
     else:
         if getattr(app, "manager", None) is None:
+            container = container_instance()
             try:
-                app.manager = CirculationManager(app._db)
+                app.manager = CirculationManager(app._db, container)
             except Exception:
                 logging.exception("Error instantiating circulation manager!")
                 raise
diff --git a/api/circulation.py b/api/circulation.py
index 9765b0801e..12fc47a4ff 100644
--- a/api/circulation.py
+++ b/api/circulation.py
@@ -15,7 +15,6 @@
     Generic,
     List,
     Literal,
-    Optional,
     Tuple,
     Type,
     TypeVar,
@@ -25,7 +24,7 @@
 )
 from flask import Response
 from flask_babel import lazy_gettext as _
 from pydantic import PositiveInt
-from sqlalchemy.orm import Query, Session
+from sqlalchemy.orm import Query
 
 from core.analytics import Analytics
 from core.config import CannotLoadConfiguration
diff --git a/api/controller.py b/api/controller.py
index 6b07889caf..4e1c90dd80 100644
--- a/api/controller.py
+++ b/api/controller.py
@@ -87,6 +87,7 @@
 from core.opds2 import AcquisitonFeedOPDS2
 from core.opensearch import OpenSearchDocument
 from core.query.playtime_entries import PlaytimeEntries
+from core.service.container import Services
 from core.user_profile import ProfileController as CoreProfileController
 from core.util.authentication_for_opds import AuthenticationForOPDSDocument
 from core.util.datetime_helpers import utc_now
@@ -166,7 +167,6 @@
     from api.admin.controller.sitewide_settings import (
         SitewideConfigurationSettingsController,
     )
-    from api.admin.controller.storage_services import StorageServicesController
     from api.admin.controller.timestamps import TimestampsController
     from api.admin.controller.view import ViewController
     from api.admin.controller.work_editor import WorkController as AdminWorkController
@@ -220,14 +220,14 @@ class CirculationManager:
     admin_logging_services_controller: LoggingServicesController
     admin_search_service_self_tests_controller: SearchServiceSelfTestsController
     admin_search_services_controller: SearchServicesController
-    admin_storage_services_controller: StorageServicesController
     admin_catalog_services_controller: CatalogServicesController
     admin_announcement_service: AnnouncementSettings
     admin_search_controller: AdminSearchController
     admin_view_controller: ViewController
 
-    def __init__(self, _db):
+    def __init__(self, _db, services: Services):
         self._db = _db
+        self.services = services
         self.site_configuration_last_update = (
             Configuration.site_configuration_last_update(self._db, timeout=0)
         )
diff --git a/api/discovery/registration_script.py
b/api/discovery/registration_script.py index 4d75cd7b5f..30c1bfd7c5 100644 --- a/api/discovery/registration_script.py +++ b/api/discovery/registration_script.py @@ -67,7 +67,7 @@ def do_run( # Set up an application context so we have access to url_for. from api.app import app - app.manager = manager or CirculationManager(self._db) + app.manager = manager or CirculationManager(self._db, self.services) base_url = ConfigurationSetting.sitewide( self._db, Configuration.BASE_URL_KEY ).value diff --git a/api/google_analytics_provider.py b/api/google_analytics_provider.py index eebf0a3a5d..09c82a72c8 100644 --- a/api/google_analytics_provider.py +++ b/api/google_analytics_provider.py @@ -6,6 +6,7 @@ from flask_babel import lazy_gettext as _ from core.model import ConfigurationSetting, ExternalIntegration, Session +from core.service.container import Services from core.util.http import HTTP from .config import CannotLoadConfiguration @@ -63,7 +64,7 @@ class GoogleAnalyticsProvider: {"key": TRACKING_ID, "label": _("Tracking ID"), "required": True}, ] - def __init__(self, integration, library=None): + def __init__(self, integration, services: Services, library=None): _db = Session.object_session(integration) if not library: raise CannotLoadConfiguration( diff --git a/api/s3_analytics_provider.py b/api/s3_analytics_provider.py index f294cc3a5d..8ddff535cc 100644 --- a/api/s3_analytics_provider.py +++ b/api/s3_analytics_provider.py @@ -1,51 +1,23 @@ import datetime import json -from typing import Any, Dict, List +import random +import string +from typing import Dict, Optional from flask_babel import lazy_gettext as _ from sqlalchemy.orm import Session from core.config import CannotLoadConfiguration from core.local_analytics_provider import LocalAnalyticsProvider -from core.mirror import MirrorUploader -from core.model import ( - ExternalIntegration, - Library, - LicensePool, - MediaTypes, - Representation, - get_one, -) -from core.model.configuration import ( - ConfigurationAttributeType, - ConfigurationGrouping, - ConfigurationMetadata, - ConfigurationOption, - ExternalIntegrationLink, -) -from core.s3 import S3Uploader, S3UploaderConfiguration +from core.model import Library, LicensePool, MediaTypes +from core.model.configuration import ConfigurationGrouping +from core.service.container import Services +from core.service.storage.s3 import S3Service class S3AnalyticsProviderConfiguration(ConfigurationGrouping): """Contains configuration settings of the S3 Analytics provider.""" - NO_MIRROR_INTEGRATION = "NO_MIRROR" - - DEFAULT_MIRROR_OPTION = ConfigurationOption(NO_MIRROR_INTEGRATION, "None") - - analytics_mirror = ConfigurationMetadata( - key="mirror_integration_id", - label=_("Analytics Mirror"), - description=_( - "S3-compatible service to use for storing analytics events. " - "The service must already be configured under 'Storage Services'." 
-        ),
-        type=ConfigurationAttributeType.SELECT,
-        required=True,
-        default=NO_MIRROR_INTEGRATION,
-        options=[DEFAULT_MIRROR_OPTION],
-    )
-
 
 class S3AnalyticsProvider(LocalAnalyticsProvider):
     """Analytics provider storing data in a S3 bucket."""
@@ -57,6 +29,14 @@ class S3AnalyticsProvider(LocalAnalyticsProvider):
         LocalAnalyticsProvider.SETTINGS + S3AnalyticsProviderConfiguration.to_settings()
     )
 
+    def __init__(
+        self,
+        integration,
+        services: Services,
+        library=None,
+    ):
+        super().__init__(integration, services, library)
+
     @staticmethod
     def _create_event_object(
         library: Library,
@@ -223,95 +203,59 @@ def collect_event(
             default=str,
             ensure_ascii=True,
         )
-        s3_uploader: S3Uploader = self._get_s3_uploader(_db)
-        analytics_file_url = s3_uploader.analytics_file_url(
-            library, license_pool, event_type, time
-        )
+        storage = self._get_storage()
+        analytics_file_key = self._get_file_key(library, license_pool, event_type, time)
 
-        # Create a temporary Representation object because S3Uploader can work only with Representation objects.
-        # NOTE: It won't be stored in the database.
-        representation = Representation(
-            media_type=MediaTypes.APPLICATION_JSON_MEDIA_TYPE, content=content
+        storage.store(
+            analytics_file_key,
+            content,
+            MediaTypes.APPLICATION_JSON_MEDIA_TYPE,
         )
-        s3_uploader.mirror_one(representation, analytics_file_url)
-
-    def _get_s3_uploader(self, db: Session) -> S3Uploader:
-        """Get an S3Uploader object associated with the provider's selected storage service.
-
-        :param db: Database session
-
-        :return: S3Uploader object associated with the provider's selected storage service
-        """
-        # To find the storage integration for the exporter, first find the
-        # external integration link associated with the provider's external
-        # integration.
-        integration_link = get_one(
-            db,
-            ExternalIntegrationLink,
-            external_integration_id=self.integration_id,
-            purpose=ExternalIntegrationLink.ANALYTICS,
-        )
-
-        if not integration_link:
-            raise CannotLoadConfiguration(
-                "The provider doesn't have an associated storage service"
-            )
-
-        # Then use the "other" integration value to find the storage integration.
-        storage_integration = get_one(
-            db, ExternalIntegration, id=integration_link.other_integration_id
+    def _get_file_key(
+        self,
+        library: Library,
+        license_pool: Optional[LicensePool],
+        event_type: str,
+        end_time: datetime.datetime,
+        start_time: Optional[datetime.datetime] = None,
+    ) -> str:
+        """The path to the analytics data file for the given library, license
+        pool and date range."""
+        root = library.short_name
+        if start_time:
+            time_part = str(start_time) + "-" + str(end_time)
+        else:
+            time_part = str(end_time)
+
+        # ensure the uniqueness of file name (in case of overlapping events)
+        collection = license_pool.collection_id if license_pool else "NONE"
+        random_string = "".join(random.choices(string.ascii_lowercase, k=10))
+        file_name = "-".join([time_part, event_type, str(collection), random_string])
+        # nest file in directories that allow for easy purging by year, month or day
+        return "/".join(
+            [
+                str(root),
+                str(end_time.year),
+                str(end_time.month),
+                str(end_time.day),
+                file_name + ".json",
+            ]
         )
 
-        if not storage_integration:
-            raise CannotLoadConfiguration(
-                "The provider doesn't have an associated storage service"
-            )
-
-        analytics_bucket = storage_integration.setting(
-            S3UploaderConfiguration.ANALYTICS_BUCKET_KEY
-        ).value
+    def _get_storage(self) -> S3Service:
+        """Return the CM's configured storage service.
+        Raises an exception if the storage service is not configured.
-        if not analytics_bucket:
+        :return: S3Service object
+        """
+        s3_storage_service = self.services.storage.analytics()
+        if s3_storage_service is None:
             raise CannotLoadConfiguration(
-                "The associated storage service does not have {} bucket".format(
-                    S3UploaderConfiguration.ANALYTICS_BUCKET_KEY
-                )
+                "No storage service is configured with an analytics bucket."
             )
 
-        s3_uploader = MirrorUploader.implementation(storage_integration)
-
-        return s3_uploader
-
-    @classmethod
-    def get_storage_settings(cls, db: Session) -> List[Dict[str, Any]]:
-        """Return the provider's configuration settings including available storage options.
-
-        :param db: Database session
-
-        :return: List containing the provider's configuration settings
-        """
-        storage_integrations = ExternalIntegration.for_goal(
-            db, ExternalIntegration.STORAGE_GOAL
-        )
-
-        for storage_integration in storage_integrations:
-            configuration_settings = [
-                setting
-                for setting in storage_integration.settings
-                if setting.key == S3UploaderConfiguration.ANALYTICS_BUCKET_KEY
-            ]
-
-            if configuration_settings:
-                if configuration_settings[0].value:
-                    S3AnalyticsProviderConfiguration.analytics_mirror.options.append(
-                        ConfigurationOption(
-                            storage_integration.id, storage_integration.name
-                        )
-                    )
-
-        cls.SETTINGS = S3AnalyticsProviderConfiguration.to_settings()
-
-        return cls.SETTINGS
+        return s3_storage_service
 
 
 Provider = S3AnalyticsProvider
diff --git a/bin/odl2_import_monitor b/bin/odl2_import_monitor
index 1a0bbe8b5b..ca1871f437 100755
--- a/bin/odl2_import_monitor
+++ b/bin/odl2_import_monitor
@@ -11,9 +11,6 @@ sys.path.append(os.path.abspath(package_dir))
 from webpub_manifest_parser.odl import ODLFeedParserFactory
 
 from api.odl2 import ODL2Importer, ODL2ImportMonitor
-
-# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY
-from core import s3  # noqa
 from core.opds2_import import RWPMManifestParser
 from core.scripts import RunCollectionMonitorScript
diff --git a/bin/odl2_schema_validate b/bin/odl2_schema_validate
index bb167ba925..f7972efc14 100755
--- a/bin/odl2_schema_validate
+++ b/bin/odl2_schema_validate
@@ -11,9 +11,6 @@ sys.path.append(os.path.abspath(package_dir))
 from webpub_manifest_parser.odl import ODLFeedParserFactory
 
 from api.odl2 import ODL2Importer
-
-# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY
-from core import s3  # noqa: autoflake
 from core.opds2_import import RWPMManifestParser
 from core.opds_schema import ODL2SchemaValidation
 from core.scripts import RunCollectionMonitorScript
diff --git a/bin/odl_import_monitor b/bin/odl_import_monitor
index dc7c536b53..aa1b5cd332 100755
--- a/bin/odl_import_monitor
+++ b/bin/odl_import_monitor
@@ -9,9 +9,6 @@ package_dir = os.path.join(bin_dir, "..")
 sys.path.append(os.path.abspath(package_dir))
 
 from api.odl import ODLImporter, ODLImportMonitor
-
-# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY
-from core import s3  # noqa
 from core.scripts import RunCollectionMonitorScript
 
 RunCollectionMonitorScript(
diff --git a/bin/opds2_import_monitor b/bin/opds2_import_monitor
index 29bcf46810..3223ba6cd0 100755
--- a/bin/opds2_import_monitor
+++ b/bin/opds2_import_monitor
@@ -9,8 +9,6 @@ sys.path.append(os.path.abspath(package_dir))
 
 from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory
 
-# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY
-from core import s3  # noqa: autoflake
 from core.model import
ExternalIntegration from core.opds2_import import OPDS2Importer, OPDS2ImportMonitor, RWPMManifestParser from core.scripts import OPDSImportScript diff --git a/bin/opds2_schema_validate b/bin/opds2_schema_validate index afd4a48fd0..070507d428 100755 --- a/bin/opds2_schema_validate +++ b/bin/opds2_schema_validate @@ -10,8 +10,6 @@ sys.path.append(os.path.abspath(package_dir)) from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core import s3 # noqa: autoflake from core.model.configuration import ExternalIntegration from core.opds2_import import OPDS2Importer, RWPMManifestParser from core.opds_schema import OPDS2SchemaValidation diff --git a/bin/opds_import_monitor b/bin/opds_import_monitor index 5b21aeb5c9..b18022933c 100755 --- a/bin/opds_import_monitor +++ b/bin/opds_import_monitor @@ -8,8 +8,6 @@ bin_dir = os.path.split(__file__)[0] package_dir = os.path.join(bin_dir, "..") sys.path.append(os.path.abspath(package_dir)) -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core import s3 # noqa: autoflake from core.scripts import OPDSImportScript OPDSImportScript().run() diff --git a/core/analytics.py b/core/analytics.py index d8b4f94391..4c1d9e27f3 100644 --- a/core/analytics.py +++ b/core/analytics.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import importlib import logging from collections import defaultdict @@ -7,6 +9,7 @@ from .config import CannotLoadConfiguration from .model import ExternalIntegration +from .service.container import container_instance from .util.datetime_helpers import utc_now from .util.log import log_elapsed_time @@ -26,7 +29,7 @@ class Analytics: GLOBAL_ENABLED: Optional[bool] = None LIBRARY_ENABLED: Set[int] = set() - def __new__(cls, _db, refresh=False) -> "Analytics": + def __new__(cls, _db: Session, refresh: bool = False) -> Analytics: instance = cls._singleton_instance if instance is None: refresh = True @@ -44,15 +47,16 @@ def _reset_singleton_instance(cls): cls._singleton_instance = None @log_elapsed_time(log_method=log.debug, message_prefix="Initializing instance") - def _initialize_instance(self, _db): + def _initialize_instance(self, _db: Session) -> None: """Initialize an instance (usually the singleton) of the class. We don't use __init__ because it would be run whether or not a new instance were instantiated. 
""" + services = container_instance() sitewide_providers = [] library_providers = defaultdict(list) - initialization_exceptions: Dict[int, Exception] = {} + initialization_exceptions: Dict[int, Exception | str] = {} global_enabled = False library_enabled = set() # Find a list of all the ExternalIntegrations set up with a @@ -68,12 +72,12 @@ def _initialize_instance(self, _db): provider_class = self._provider_class_from_module(module) if provider_class: if not libraries: - provider = provider_class(integration) + provider = provider_class(integration, services) sitewide_providers.append(provider) global_enabled = True else: for library in libraries: - provider = provider_class(integration, library) + provider = provider_class(integration, services, library) library_providers[library.id].append(provider) library_enabled.add(library.id) else: diff --git a/core/local_analytics_provider.py b/core/local_analytics_provider.py index 207972591c..4b40c497bf 100644 --- a/core/local_analytics_provider.py +++ b/core/local_analytics_provider.py @@ -2,6 +2,7 @@ from sqlalchemy.orm.session import Session from .model import CirculationEvent, ExternalIntegration, create, get_one +from .service.container import Services class LocalAnalyticsProvider: @@ -41,12 +42,13 @@ class LocalAnalyticsProvider: }, ] - def __init__(self, integration, library=None): + def __init__(self, integration, services: Services, library=None): self.integration_id = integration.id self.location_source = ( integration.setting(self.LOCATION_SOURCE).value or self.LOCATION_SOURCE_DISABLED ) + self.services = services if library: self.library_id = library.id else: diff --git a/core/marc.py b/core/marc.py index ccb1d89472..1157e5c86c 100644 --- a/core/marc.py +++ b/core/marc.py @@ -1,5 +1,6 @@ import re from io import BytesIO +from typing import Optional from flask_babel import lazy_gettext as _ from pymarc import Field, Record, Subfield @@ -9,7 +10,6 @@ from .config import CannotLoadConfiguration from .external_search import ExternalSearchIndex, SortKeyPagination from .lane import BaseFacets, Lane -from .mirror import MirrorUploader from .model import ( CachedMARCFile, DeliveryMechanism, @@ -20,11 +20,7 @@ Work, get_one_or_create, ) - -# this is necessary to ensure these implementations are registered -from .s3 import MinIOUploader, S3Uploader # noqa: autoflake - -# registered +from .service.storage.s3 import MultipartS3ContextManager, S3Service from .util import LanguageCodes from .util.datetime_helpers import utc_now @@ -572,20 +568,6 @@ class MARCExporter: }, ] - NO_MIRROR_INTEGRATION = "NO_MIRROR" - DEFAULT_MIRROR_INTEGRATION = dict( - key=NO_MIRROR_INTEGRATION, label=_("None - Do not mirror MARC files") - ) - SETTING = { - "key": "mirror_integration_id", - "label": _("MARC Mirror"), - "description": _( - "Storage protocol to use for uploading generated MARC files. The service must already be configured under 'Storage Services'." - ), - "type": "select", - "options": [DEFAULT_MIRROR_INTEGRATION], - } - @classmethod def from_config(cls, library): _db = Session.object_session(library) @@ -606,27 +588,6 @@ def __init__(self, _db, library, integration): self.library = library self.integration = integration - @classmethod - def get_storage_settings(cls, _db): - integrations = ExternalIntegration.for_goal( - _db, ExternalIntegration.STORAGE_GOAL - ) - cls.SETTING["options"] = [cls.DEFAULT_MIRROR_INTEGRATION] - for integration in integrations: - # Only add an integration to choose from if it has a - # MARC File Bucket field in its settings. 
- configuration_settings = [ - s for s in integration.settings if s.key == "marc_bucket" - ] - - if configuration_settings: - if configuration_settings[0].value: - cls.SETTING["options"].append( - dict(key=str(integration.id), label=integration.name) - ) - - return cls.SETTING - @classmethod def create_record(cls, work, annotator, force_create=False, integration=None): """Build a complete MARC record for a given work.""" @@ -674,14 +635,24 @@ def create_record(cls, work, annotator, force_create=False, integration=None): ) return record + def _file_key(self, library, lane, end_time, start_time=None): + """The path to the hosted MARC file for the given library, lane, + and date range.""" + root = library.short_name + if start_time: + time_part = str(start_time) + "-" + str(end_time) + else: + time_part = str(end_time) + parts = [root, time_part, lane.display_name] + return "/".join(parts) + ".mrc" + def records( self, lane, annotator, - mirror_integration, + storage_service: Optional[S3Service], start_time=None, force_refresh=False, - mirror=None, search_engine=None, query_batch_size=500, upload_batch_size=7500, @@ -691,10 +662,9 @@ def records( :param lane: The Lane to export books from. :param annotator: The Annotator to use when creating MARC records. - :param mirror_integration: The mirror integration to use for MARC files. + :param storage_service: The storage service integration to use for MARC files. :param start_time: Only include records that were created or modified after this time. :param force_refresh: Create new records even when cached records are available. - :param mirror: Optional mirror to use instead of loading one from configuration. :param query_batch_size: Number of works to retrieve with a single Opensearch query. :param upload_batch_size: Number of records to mirror at a time. This is different from query_batch_size because S3 enforces a minimum size of 5MB for all parts @@ -702,18 +672,10 @@ def records( works for a single query. """ - # We mirror the content, if it's not empty. If it's empty, we create a CachedMARCFile - # and Representation, but don't actually mirror it. - if not mirror: - storage_protocol = mirror_integration.protocol - mirror = MirrorUploader.implementation(mirror_integration) - if mirror.NAME != storage_protocol: - raise Exception( - "Mirror integration does not match configured storage protocol" - ) - - if not mirror: - raise Exception("No mirror integration is configured") + # We store the content, if it's not empty. If it's empty, we create a CachedMARCFile + # and Representation, but don't actually store it. 
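+        # The storage service is injected by the caller rather than looked up
+        # here, so a missing service is a configuration error we can surface
+        # immediately instead of partway through a long export run.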
+ if storage_service is None: + raise Exception("No storage service is configured") search_engine = search_engine or ExternalSearchIndex(self._db) @@ -725,12 +687,12 @@ def records( facets = MARCExporterFacets(start_time=start_time) pagination = SortKeyPagination(size=query_batch_size) - url = mirror.marc_file_url(self.library, lane, end_time, start_time) - representation, ignore = get_one_or_create( - self._db, Representation, url=url, media_type=Representation.MARC_MEDIA_TYPE - ) + key = self._file_key(self.library, lane, end_time, start_time) - with mirror.multipart_upload(representation, url) as upload: + with storage_service.multipart( + key, + content_type=Representation.MARC_MEDIA_TYPE, + ) as upload: this_batch = BytesIO() this_batch_size = 0 while pagination is not None: @@ -752,7 +714,7 @@ def records( this_batch_size += pagination.this_page_size if this_batch_size >= upload_batch_size: # We've reached or exceeded the upload threshold. - # Upload one part of the multi-part document. + # Upload one part of the multipart document. self._upload_batch(this_batch, upload) this_batch = BytesIO() this_batch_size = 0 @@ -760,10 +722,16 @@ def records( # Upload the final part of the multi-document, if # necessary. - self._upload_batch(this_batch, upload) + self._upload_batch(this_batch, upload) # type: ignore[unreachable] + representation, ignore = get_one_or_create( + self._db, + Representation, + url=upload.url, + media_type=Representation.MARC_MEDIA_TYPE, + ) representation.fetched_at = end_time - if not representation.mirror_exception: + if not upload.exception: cached, is_new = get_one_or_create( self._db, CachedMARCFile, @@ -775,8 +743,11 @@ def records( if not is_new: cached.representation = representation cached.end_time = end_time + representation.set_as_mirrored(upload.url) + else: + representation.mirror_exception = str(upload.exception) - def _upload_batch(self, output, upload): + def _upload_batch(self, output: BytesIO, upload: MultipartS3ContextManager): "Upload a batch of MARC records as one part of a multi-part upload." content = output.getvalue() if content: diff --git a/core/mirror.py b/core/mirror.py deleted file mode 100644 index c082b40158..0000000000 --- a/core/mirror.py +++ /dev/null @@ -1,202 +0,0 @@ -from __future__ import annotations - -from abc import ABCMeta, abstractmethod -from typing import TYPE_CHECKING -from urllib.parse import urlsplit - -from .config import CannotLoadConfiguration -from .util.datetime_helpers import utc_now - -if TYPE_CHECKING: - from .model import Collection, ExternalIntegration, Representation - - -class MirrorUploader(metaclass=ABCMeta): - """Handles the job of uploading a representation's content to - a mirror that we control. - """ - - STORAGE_GOAL = "storage" - - # Depending on the .protocol of an ExternalIntegration with - # .goal=STORAGE, a different subclass might be initialized by - # sitewide() or for_collection(). A subclass that wants to take - # advantage of this should add a mapping here from its .protocol - # to itself. - IMPLEMENTATION_REGISTRY: dict[str, type[MirrorUploader]] = {} - - @classmethod - def mirror(cls, _db, storage_name=None, integration=None): - """Create a MirrorUploader from an integration or storage name. - - :param storage_name: The name of the storage integration. - :param integration: The external integration. - - :return: A MirrorUploader. - - :raise: CannotLoadConfiguration if no integration with - goal==STORAGE_GOAL is configured. 
- """ - if not integration: - integration = cls.integration_by_name(_db, storage_name) - return cls.implementation(integration) - - @classmethod - def integration_by_name(cls, _db, storage_name=None): - """Find the ExternalIntegration for the mirror by storage name.""" - from .model import ExternalIntegration - - qu = _db.query(ExternalIntegration).filter( - ExternalIntegration.goal == cls.STORAGE_GOAL, - ExternalIntegration.name == storage_name, - ) - integrations = qu.all() - if not integrations: - raise CannotLoadConfiguration( - "No storage integration with name '%s' is configured." % storage_name - ) - - [integration] = integrations - return integration - - @classmethod - def for_collection(cls, collection, purpose): - """Create a MirrorUploader for the given Collection. - - :param collection: Use the mirror configuration for this Collection. - :param purpose: Use the purpose of the mirror configuration. - - :return: A MirrorUploader, or None if the Collection has no - mirror integration. - """ - from .model import ExternalIntegration - - try: - from .model import Session - - _db = Session.object_session(collection) - integration = ExternalIntegration.for_collection_and_purpose( - _db, collection, purpose - ) - except CannotLoadConfiguration as e: - return None - return cls.implementation(integration) - - @classmethod - def implementation(cls, integration): - """Instantiate the appropriate implementation of MirrorUploader - for the given ExternalIntegration. - """ - if not integration: - return None - implementation_class = cls.IMPLEMENTATION_REGISTRY.get( - integration.protocol, cls - ) - return implementation_class(integration) - - def __init__(self, integration: ExternalIntegration, host: str): - """Instantiate a MirrorUploader from an ExternalIntegration. - - :param integration: An ExternalIntegration configuring the credentials - used to upload things. - :param host: Base host used by the mirror - """ - if integration.goal != self.STORAGE_GOAL: - # This collection's 'mirror integration' isn't intended to - # be used to mirror anything. - raise CannotLoadConfiguration( - "Cannot create an MirrorUploader from an integration with goal=%s" - % integration.goal - ) - - self._host = host - - # Subclasses will override this to further configure the client - # based on the credentials in the ExternalIntegration. - - def do_upload(self, representation): - raise NotImplementedError() - - def mirror_one( - self, - representation: Representation, - mirror_to: str, - collection: Collection | None = None, - ): - """Mirror a single Representation. - - :param representation: Book's representation - :param mirror_to: Mirror URL - :param collection: Collection - """ - now = utc_now() - exception = self.do_upload(representation) - representation.mirror_exception = exception - if exception: - representation.mirrored_at = None - else: - representation.mirrored_at = now - - def mirror_batch(self, representations): - """Mirror a batch of Representations at once.""" - - for representation in representations: - self.mirror_one(representation, "") - - def book_url( - self, - identifier, - extension=".epub", - open_access=True, - data_source=None, - title=None, - ): - """The URL of the hosted EPUB file for the given identifier. - - This does not upload anything to the URL, but it is expected - that calling mirror() on a certain Representation object will - make that representation end up at that URL. 
- """ - raise NotImplementedError() - - def cover_image_url(self, data_source, identifier, filename=None, scaled_size=None): - """The URL of the hosted cover image for the given identifier. - - This does not upload anything to the URL, but it is expected - that calling mirror() on a certain Representation object will - make that representation end up at that URL. - """ - raise NotImplementedError() - - def sign_url(self, url: str, expiration: int | None = None) -> str: - """Signs a URL and make it expirable - - :param url: URL - :param expiration: (Optional) Time in seconds for the presigned URL to remain valid. - Default value depends on a specific implementation - :return: Signed expirable link - """ - raise NotImplementedError() - - def is_self_url(self, url: str) -> bool: - """Determines whether the URL has the mirror's host or a custom domain - - :param url: The URL - :return: Boolean value indicating whether the URL has the mirror's host or a custom domain - """ - scheme, netloc, path, query, fragment = urlsplit(url) - - if netloc.endswith(self._host): - return True - else: - return False - - @abstractmethod - def split_url(self, url: str, unquote: bool = True) -> tuple[str, str]: - """Splits the URL into the components: container (bucket) and file path - - :param url: URL - :param unquote: Boolean value indicating whether it's required to unquote URL elements - :return: Tuple (bucket, file path) - """ - raise NotImplementedError() diff --git a/core/mock_analytics_provider.py b/core/mock_analytics_provider.py index 8742b94063..734373028f 100644 --- a/core/mock_analytics_provider.py +++ b/core/mock_analytics_provider.py @@ -1,7 +1,16 @@ class MockAnalyticsProvider: """A mock analytics provider that keeps track of how many times it's called.""" - def __init__(self, integration=None, library=None): + def __init__(self, integration=None, services=None, library=None): + """ + Since this is a mock analytics provider, it doesn't need to do anything + with the integration or services. It just needs to keep track of how + many times it's called. + + :param integration: The ExternalIntegration that configures this analytics service. + :param services: The Service object that provides services to this provider. + :param library: The library this analytics provider is associated with. + """ self.count = 0 self.event = None if integration: diff --git a/core/model/cachedfeed.py b/core/model/cachedfeed.py index 3fafcfee6b..4de95da13a 100644 --- a/core/model/cachedfeed.py +++ b/core/model/cachedfeed.py @@ -1,9 +1,10 @@ # CachedFeed, WillNotGenerateExpensiveFeed +from __future__ import annotations import datetime import logging from collections import namedtuple -from typing import Optional +from typing import TYPE_CHECKING, Optional from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, Unicode from sqlalchemy.orm import Mapped, relationship @@ -14,6 +15,9 @@ from . import Base, flush, get_one, get_one_or_create from .work import Work +if TYPE_CHECKING: + from . import Representation + # This named tuple makes it easy to manage the return value of # CachedFeed._prepare_keys. CachedFeedKeys = namedtuple( @@ -93,7 +97,7 @@ def fetch( refresher_method, max_age=None, raw=False, - **response_kwargs + **response_kwargs, ): """Retrieve a cached feed from the database if possible. 
@@ -415,6 +419,9 @@ class CachedMARCFile(Base): representation_id = Column( Integer, ForeignKey("representations.id"), nullable=False ) + representation: Mapped[Representation] = relationship( + "Representation", back_populates="marc_file" + ) start_time = Column(DateTime(timezone=True), nullable=True, index=True) end_time = Column(DateTime(timezone=True), nullable=True, index=True) diff --git a/core/model/collection.py b/core/model/collection.py index df102b004d..a3a83329fd 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -948,10 +948,7 @@ def delete(self, search_index=None): # Collection, assuming it wasn't deleted already. if self.external_integration: for link in self.external_integration.links: - if ( - link.other_integration - and link.other_integration.goal == ExternalIntegration.STORAGE_GOAL - ): + if link.other_integration and link.other_integration.goal == "storage": logging.info( f"Deletion of collection {self.name} is disassociating " f"storage integration {link.other_integration.name}." diff --git a/core/model/configuration.py b/core/model/configuration.py index fcfe36d79e..696368053e 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -18,7 +18,6 @@ from core.model.hybrid import hybrid_property from ..config import CannotLoadConfiguration, Configuration -from ..mirror import MirrorUploader from ..util.string_helpers import random_string from . import Base, get_one, get_one_or_create from .constants import DataSourceConstants @@ -89,10 +88,6 @@ class ExternalIntegration(Base): # but not the books themselves. METADATA_GOAL = "metadata" - # These integrations are associated with external services such as - # S3 that provide access to book covers. - STORAGE_GOAL = MirrorUploader.STORAGE_GOAL - # These integrations are associated with external services such as # Opensearch that provide indexed search. SEARCH_GOAL = "search" @@ -164,11 +159,6 @@ class ExternalIntegration(Base): NYT = "New York Times" CONTENT_SERVER = "Content Server" - # Integrations with STORAGE_GOAL - S3 = "Amazon S3" - MINIO = "MinIO" - LCP = "LCP" - # Integrations with SEARCH_GOAL OPENSEARCH = "Opensearch" diff --git a/core/model/resource.py b/core/model/resource.py index 299839862e..8423e9fa52 100644 --- a/core/model/resource.py +++ b/core/model/resource.py @@ -546,7 +546,7 @@ class Representation(Base, MediaTypes): # A Representation may be a CachedMARCFile. 
marc_file: Mapped[CachedMARCFile] = relationship( "CachedMARCFile", - backref="representation", + back_populates="representation", cascade="all, delete-orphan", ) diff --git a/core/s3.py b/core/s3.py deleted file mode 100644 index 89c9797883..0000000000 --- a/core/s3.py +++ /dev/null @@ -1,881 +0,0 @@ -import functools -import logging -import random -import string -from contextlib import contextmanager -from datetime import datetime -from enum import Enum -from typing import Any, List, Optional, Tuple -from urllib.parse import quote, unquote_plus, urlsplit - -import boto3 -import botocore -from botocore.config import Config -from botocore.exceptions import BotoCoreError, ClientError -from flask_babel import lazy_gettext as _ - -from .mirror import MirrorUploader -from .model import Collection, ExternalIntegration, Library, LicensePool, Representation -from .model.configuration import ( - ConfigurationAttributeType, - ConfigurationGrouping, - ConfigurationMetadata, - ConfigurationOption, -) - - -class MultipartS3Upload: - def __init__(self, uploader, representation, mirror_to): - self.uploader = uploader - self.representation = representation - self.bucket, self.filename = uploader.split_url(mirror_to) - media_type = representation.external_media_type - self.part_number = 1 - self.parts = [] - - self.upload = uploader.client.create_multipart_upload( - Bucket=self.bucket, - Key=self.filename, - ContentType=media_type, - ) - - def upload_part(self, content): - logging.info(f"Uploading part {self.part_number} of {self.filename}") - result = self.uploader.client.upload_part( - Body=content, - Bucket=self.bucket, - Key=self.filename, - PartNumber=self.part_number, - UploadId=self.upload.get("UploadId"), - ) - self.parts.append(dict(ETag=result.get("ETag"), PartNumber=self.part_number)) - self.part_number += 1 - - def complete(self): - if not self.parts: - logging.info("Upload of %s was empty, not mirroring" % self.filename) - self.abort() - else: - self.uploader.client.complete_multipart_upload( - Bucket=self.bucket, - Key=self.filename, - UploadId=self.upload.get("UploadId"), - MultipartUpload=dict(Parts=self.parts), - ) - mirror_url = self.uploader.final_mirror_url(self.bucket, self.filename) - self.representation.set_as_mirrored(mirror_url) - logging.info("MIRRORED %s" % self.representation.mirror_url) - - def abort(self): - logging.info("Aborting upload of %s" % self.filename) - self.uploader.client.abort_multipart_upload( - Bucket=self.bucket, - Key=self.filename, - UploadId=self.upload.get("UploadId"), - ) - - -def _get_available_regions() -> List[str]: - """Returns a list of available S3 regions - - :return: List of available S3 regions - """ - session = boto3.session.Session() - - return session.get_available_regions(service_name="s3") - - -def _get_available_region_options() -> List[ConfigurationOption]: - """Returns a list of available options for S3Uploader's Region configuration setting - - :return: List of available options for S3Uploader's Region configuration setting - """ - available_regions = sorted(_get_available_regions()) - options = [ConfigurationOption(region, region) for region in available_regions] - - return options - - -class S3AddressingStyle(Enum): - """Enumeration of different addressing styles supported by boto""" - - VIRTUAL = "virtual" - PATH = "path" - AUTO = "auto" - - -class S3UploaderConfiguration(ConfigurationGrouping): - S3_REGION = "s3_region" - S3_DEFAULT_REGION = "us-east-1" - - S3_ADDRESSING_STYLE = "s3_addressing_style" - S3_DEFAULT_ADDRESSING_STYLE = 
S3AddressingStyle.VIRTUAL.value - - S3_PRESIGNED_URL_EXPIRATION = "s3_presigned_url_expiration" - S3_DEFAULT_PRESIGNED_URL_EXPIRATION = 3600 - - BOOK_COVERS_BUCKET_KEY = "book_covers_bucket" - OA_CONTENT_BUCKET_KEY = "open_access_content_bucket" - PROTECTED_CONTENT_BUCKET_KEY = "protected_content_bucket" - ANALYTICS_BUCKET_KEY = "analytics_bucket" - - MARC_BUCKET_KEY = "marc_bucket" - - URL_TEMPLATE_KEY = "bucket_name_transform" - URL_TEMPLATE_HTTP = "http" - URL_TEMPLATE_HTTPS = "https" - URL_TEMPLATE_DEFAULT = "identity" - - URL_TEMPLATES_BY_TEMPLATE = { - URL_TEMPLATE_HTTP: "http://%(bucket)s/%(key)s", - URL_TEMPLATE_HTTPS: "https://%(bucket)s/%(key)s", - URL_TEMPLATE_DEFAULT: "https://%(bucket)s.s3.%(region)s/%(key)s", - } - - access_key = ConfigurationMetadata( - key=ExternalIntegration.USERNAME, - label=_("Access Key"), - description="", - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - secret_key = ConfigurationMetadata( - key=ExternalIntegration.PASSWORD, - label=_("Secret Key"), - description=_( - "If the Access Key and Secret Key are not given here credentials " - "will be used as outlined in the " - 'Boto3 documenation. ' - "If Access Key is given, Secrent Key must also be given." - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - book_covers_bucket = ConfigurationMetadata( - key=BOOK_COVERS_BUCKET_KEY, - label=_("Book Covers Bucket"), - description=_( - "All book cover images encountered will be mirrored to this S3 bucket. " - "Large images will be scaled down, and the scaled-down copies will also be uploaded to this bucket. " - "
<br><br>The bucket must already exist&mdash;it will not be created automatically.<br><br>
" - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - open_access_content_bucket = ConfigurationMetadata( - key=OA_CONTENT_BUCKET_KEY, - label=_("Open Access Content Bucket"), - description=_( - "All open-access books encountered will be uploaded to this S3 bucket. " - "
<br><br>The bucket must already exist&mdash;it will not be created automatically.<br><br>
" - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - protected_access_content_bucket = ConfigurationMetadata( - key=PROTECTED_CONTENT_BUCKET_KEY, - label=_("Protected Access Content Bucket"), - description=_( - "Self-hosted books will be uploaded to this S3 bucket. " - "
<br><br>The bucket must already exist&mdash;it will not be created automatically.<br><br>
" - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - analytics_bucket = ConfigurationMetadata( - key=ANALYTICS_BUCKET_KEY, - label=_("Analytics Bucket"), - description=_( - "Text files containing analytics data will be uploaded to this " - "S3 bucket. " - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - marc_file_bucket = ConfigurationMetadata( - key=MARC_BUCKET_KEY, - label=_("MARC File Bucket"), - description=_( - "All generated MARC files will be uploaded to this S3 bucket. " - "
<br><br>The bucket must already exist&mdash;it will not be created automatically.<br><br>
" - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - s3_region = ConfigurationMetadata( - key=S3_REGION, - label=_("S3 region"), - description=_("S3 region which will be used for storing the content."), - type=ConfigurationAttributeType.SELECT, - required=False, - default=S3_DEFAULT_REGION, - options=_get_available_region_options(), - ) - - s3_addressing_style = ConfigurationMetadata( - key=S3_ADDRESSING_STYLE, - label=_("S3 addressing style"), - description=_( - "Buckets created after September 30, 2020, will support only virtual hosted-style requests. " - "Path-style requests will continue to be supported for buckets created on or before this date. " - "For more information, " - 'see ' - "Amazon S3 Path Deprecation Plan - The Rest of the Story." - ), - type=ConfigurationAttributeType.SELECT, - required=False, - default=S3_DEFAULT_REGION, - options=[ - ConfigurationOption(S3AddressingStyle.VIRTUAL.value, _("Virtual")), - ConfigurationOption(S3AddressingStyle.PATH.value, _("Path")), - ConfigurationOption(S3AddressingStyle.AUTO.value, _("Auto")), - ], - ) - - s3_presigned_url_expiration = ConfigurationMetadata( - key=S3_PRESIGNED_URL_EXPIRATION, - label=_("S3 presigned URL expiration"), - description=_("Time in seconds for the presigned URL to remain valid"), - type=ConfigurationAttributeType.NUMBER, - required=False, - default=S3_DEFAULT_PRESIGNED_URL_EXPIRATION, - ) - - url_template = ConfigurationMetadata( - key=URL_TEMPLATE_KEY, - label=_("URL format"), - description=_( - "A file mirrored to S3 is available at http://{bucket}.s3.{region}.amazonaws.com/{filename}. " - "If you've set up your DNS so that http://[bucket]/ or https://[bucket]/ points to the appropriate " - "S3 bucket, you can configure this S3 integration to shorten the URLs. " - "
<br><br>If you haven't set up your S3 buckets, don't change this from the default -- " - "you'll get URLs that don't work.<br><br>
" - ), - type=ConfigurationAttributeType.SELECT, - required=False, - default=URL_TEMPLATE_DEFAULT, - options=[ - ConfigurationOption( - URL_TEMPLATE_DEFAULT, - _("S3 Default: https://{bucket}.s3.{region}.amazonaws.com/{file}"), - ), - ConfigurationOption( - URL_TEMPLATE_HTTPS, _("HTTPS: https://{bucket}/{file}") - ), - ConfigurationOption(URL_TEMPLATE_HTTP, _("HTTP: http://{bucket}/{file}")), - ], - ) - - -class S3Uploader(MirrorUploader): - NAME = ExternalIntegration.S3 - - # AWS S3 host - S3_HOST = "amazonaws.com" - - SETTINGS = S3UploaderConfiguration.to_settings() - - SITEWIDE = True - - def __init__( - self, - integration: ExternalIntegration, - client_class: Optional[Any] = None, - host: str = S3_HOST, - ) -> None: - """Instantiate an S3Uploader from an ExternalIntegration. - - :param integration: An ExternalIntegration - :param client_class: Mock object (or class) to use (or instantiate) - instead of boto3.client. - :param host: Host used by this integration - """ - super().__init__(integration, host) - - if not client_class: - client_class = boto3.client - - self._s3_region = integration.setting( - S3UploaderConfiguration.S3_REGION - ).value_or_default(S3UploaderConfiguration.S3_DEFAULT_REGION) - - self._s3_addressing_style = integration.setting( - S3UploaderConfiguration.S3_ADDRESSING_STYLE - ).value_or_default(S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE) - - self._s3_presigned_url_expiration = integration.setting( - S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION - ).value_or_default(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION) - - if callable(client_class): - # Pass None into boto3 if we get an empty string. - access_key = integration.username if integration.username != "" else None - secret_key = integration.password if integration.password != "" else None - config = Config( - signature_version=botocore.UNSIGNED, - s3={"addressing_style": self._s3_addressing_style}, - ) - # NOTE: Unfortunately, boto ignores credentials (aws_access_key_id, aws_secret_access_key) - # when using botocore.UNSIGNED signature version and doesn't authenticate the client in this case. - # That's why we have to create two S3 boto clients: - # - the first client WITHOUT authentication which is used for generating unsigned URLs - # - the second client WITH authentication used for working with S3: uploading files, etc. - self._s3_link_client = client_class( - "s3", - region_name=self._s3_region, - aws_access_key_id=None, - aws_secret_access_key=None, - config=config, - ) - self.client = client_class( - "s3", - region_name=self._s3_region, - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - ) - else: - self.client = client_class - - self.url_transform = integration.setting( - S3UploaderConfiguration.URL_TEMPLATE_KEY - ).value_or_default(S3UploaderConfiguration.URL_TEMPLATE_DEFAULT) - - # Transfer information about bucket names from the - # ExternalIntegration to the S3Uploader object, so we don't - # have to keep the ExternalIntegration around. - self.buckets = dict() - for setting in integration.settings: - if setting.key is not None and setting.key.endswith("_bucket"): - self.buckets[setting.key] = setting.value - - def _generate_s3_url(self, bucket: str, path: Any) -> str: - """Generates an S3 URL - - :param bucket: Bucket name - :return: S3 URL - """ - key = path - - # NOTE: path can be an empty string meaning that - # we need to generate a URL pointing at the root directory of the bucket. 
-        # However, boto3 doesn't allow us to pass the key as an empty string.
-        # As a workaround we set it to a dummy string and later remove it from the generated URL
-        if not path:
-            key = "dummy"
-
-        url = self._s3_link_client.generate_presigned_url(
-            "get_object", ExpiresIn=0, Params={"Bucket": bucket, "Key": key}
-        )
-
-        # If the path was an empty string we need to strip out trailing dummy string ending up with a URL
-        # pointing at the root directory of the bucket
-        if not path:
-            url = url.replace("/" + key, "/")
-
-        return url
-
-    def sign_url(self, url: str, expiration: Optional[int] = None) -> str:
-        """Signs a URL and make it expirable
-
-        :param url: URL
-        :param expiration: (Optional) Time in seconds for the presigned URL to remain valid.
-            If it's empty, S3_PRESIGNED_URL_EXPIRATION configuration setting is used
-        :return: Signed expirable link
-        """
-        if expiration is None:
-            expiration = self._s3_presigned_url_expiration
-
-        bucket, key = self.split_url(url)
-        url = self.client.generate_presigned_url(
-            "get_object",
-            ExpiresIn=int(expiration),
-            Params={"Bucket": bucket, "Key": key},
-        )
-
-        return url
-
-    def get_bucket(self, bucket_key):
-        """Gets the bucket for a particular use based on the given key"""
-        return self.buckets.get(bucket_key)
-
-    def url(self, bucket, path):
-        """The URL to a resource on S3 identified by bucket and path."""
-        custom_url = bucket.startswith("http://") or bucket.startswith("https://")
-
-        if isinstance(path, list):
-            # This is a list of key components that need to be quoted
-            # and assembled.
-            path = self.key_join(path, encode=custom_url)
-        if isinstance(path, bytes):
-            path = path.decode("utf-8")
-        if path.startswith("/"):
-            path = path[1:]
-
-        if custom_url:
-            url = bucket
-
-            if not url.endswith("/"):
-                url += "/"
-
-            return url + path
-        else:
-            url = self._generate_s3_url(bucket, path)
-
-            return url
-
-    def cover_image_root(self, bucket, data_source, scaled_size=None):
-        """The root URL to the S3 location of cover images for
-        the given data source.
-        """
-        parts = []
-        if scaled_size:
-            parts.extend(["scaled", str(scaled_size)])
-        if isinstance(data_source, str):
-            data_source_name = data_source
-        else:
-            data_source_name = data_source.name
-        parts.append(data_source_name)
-        url = self.url(bucket, parts)
-        if not url.endswith("/"):
-            url += "/"
-        return url
-
-    def content_root(self, bucket):
-        """The root URL to the S3 location of hosted content of
-        the given type.
-        """
-        return self.url(bucket, "/")
-
-    def marc_file_root(self, bucket, library):
-        url = self.url(bucket, [library.short_name])
-        if not url.endswith("/"):
-            url += "/"
-        return url
-
-    def _analytics_file_root(self, bucket, library) -> str:
-        url = self.url(bucket, [library.short_name])
-        if not url.endswith("/"):
-            url += "/"
-        return url
-
-    @classmethod
-    def key_join(self, key, encode=True):
-        """Quote the path portions of an S3 key while leaving the path
-        characters themselves alone.
-
-        :param key: Either a key, or a list of parts to be
-            assembled into a key.
-
-        :return: A string that can be used as an S3 key.
- """ - if isinstance(key, str): - parts = key.split("/") - else: - parts = key - new_parts = [] - - for part in parts: - if isinstance(part, bytes): - part = part.decode("utf-8") - if encode: - part = quote(str(part)) - new_parts.append(part) - - return "/".join(new_parts) - - def book_url( - self, - identifier, - extension=".epub", - open_access=True, - data_source=None, - title=None, - ): - """The path to the hosted EPUB file for the given identifier.""" - bucket = self.get_bucket( - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY - if open_access - else S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY - ) - root = self.content_root(bucket) - - if not extension.startswith("."): - extension = "." + extension - - parts = [] - if data_source: - parts.append(data_source.name) - parts.append(identifier.type) - if title: - # e.g. DataSource/ISBN/1234/Title.epub - parts.append(identifier.identifier) - filename = title - else: - # e.g. DataSource/ISBN/1234.epub - filename = identifier.identifier - parts.append(filename + extension) - return root + self.key_join(parts) - - def cover_image_url(self, data_source, identifier, filename, scaled_size=None): - """The path to the hosted cover image for the given identifier.""" - bucket = self.get_bucket(S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY) - root = self.cover_image_root(bucket, data_source, scaled_size) - parts = [identifier.type, identifier.identifier, filename] - return root + self.key_join(parts) - - def marc_file_url(self, library, lane, end_time, start_time=None): - """The path to the hosted MARC file for the given library, lane, - and date range.""" - bucket = self.get_bucket(S3UploaderConfiguration.MARC_BUCKET_KEY) - root = self.marc_file_root(bucket, library) - if start_time: - time_part = str(start_time) + "-" + str(end_time) - else: - time_part = str(end_time) - parts = [time_part, lane.display_name] - return root + self.key_join(parts) + ".mrc" - - def analytics_file_url( - self, - library: Library, - license_pool: LicensePool, - event_type: str, - end_time: datetime, - start_time: Optional[datetime] = None, - ): - """The path to the analytics data file for the given library, license - pool and date range.""" - bucket = self.get_bucket(S3UploaderConfiguration.ANALYTICS_BUCKET_KEY) - root = self._analytics_file_root(bucket, library) - if start_time: - time_part = str(start_time) + "-" + str(end_time) - else: - time_part = str(end_time) - - # ensure the uniqueness of file name (in case of overlapping events) - collection = license_pool.collection_id if license_pool else "NONE" - random_string = "".join(random.choices(string.ascii_lowercase, k=10)) - file_name = "-".join([time_part, event_type, str(collection), random_string]) - # nest file in directories that allow for easy purging by year, month or day - parts = [ - str(end_time.year), - str(end_time.month), - str(end_time.day), - file_name + ".json", - ] - return root + self.key_join(parts) - - def split_url(self, url: str, unquote: bool = True) -> Tuple[str, str]: - """Splits the URL into the components: bucket and file path - - :param url: URL - :param unquote: Boolean value indicating whether it's required to unquote URL elements - :return: Tuple (bucket, file path) - """ - scheme, netloc, path, query, fragment = urlsplit(url) - - if self.is_self_url(url): - host_parts = netloc.split(".") - host_parts_count = len(host_parts) - - # 1. Path-style requests - # 1.1. URL without a region: https://s3.amazonaws.com/{bucket}/{path} - # 1.2. 
-
-            # 2. Virtual hosted-style requests
-            # 2.1. Legacy global endpoints: https://{bucket}.s3.amazonaws.com/{path}
-            # 2.2. Endpoints with s3-region: https://{bucket}.s3-{region}.amazonaws.com/{path}
-            # 2.3. Endpoints with s3.region: https://{bucket}.s3.{region}.amazonaws.com/{path}
-
-            if host_parts_count == 3 or (
-                host_parts_count == 4 and host_parts[0] == "s3"
-            ):
-                if path.startswith("/"):
-                    path = path[1:]
-                bucket, filename = path.split("/", 1)
-            else:
-                bucket = host_parts[0]
-
-                if path.startswith("/"):
-                    path = path[1:]
-
-                filename = path
-        else:
-            bucket = netloc
-            filename = path[1:]
-
-        if unquote:
-            filename = unquote_plus(filename)
-
-        return bucket, filename
-
-    def final_mirror_url(self, bucket, key):
-        """Determine the URL to pass into Representation.set_as_mirrored,
-        assuming that it was successfully uploaded to the given
-        `bucket` as `key`.
-
-        Depending on ExternalIntegration configuration this may
-        be any of the following:
-
-        https://{bucket}.s3.{region}.amazonaws.com/{key}
-        http://{bucket}/{key}
-        https://{bucket}/{key}
-        """
-        templates = S3UploaderConfiguration.URL_TEMPLATES_BY_TEMPLATE
-        default = templates[S3UploaderConfiguration.URL_TEMPLATE_DEFAULT]
-        template = templates.get(self.url_transform, default)
-
-        if template == default:
-            link = self._generate_s3_url(bucket, self.key_join(key, encode=False))
-        else:
-            link = template % dict(bucket=bucket, key=self.key_join(key))
-
-        return link
-
-    def mirror_one(
-        self,
-        representation: Representation,
-        mirror_to: str,
-        collection: Optional[Collection] = None,
-    ) -> Any:
-        """Mirror a single representation to the given URL.
-
-        :param representation: Book's representation
-        :param mirror_to: Mirror URL
-        :param collection: Collection
-        """
-        # Turn the original URL into an s3.amazonaws.com URL.
-        media_type = representation.external_media_type
-        bucket, remote_filename = self.split_url(mirror_to)
-        fh = representation.external_content()
-        try:
-            result = self.client.upload_fileobj(
-                Fileobj=fh,
-                Bucket=bucket,
-                Key=remote_filename,
-                ExtraArgs=dict(ContentType=media_type),
-            )
-
-            # Since upload_fileobj completed without a problem, we
-            # know the file is available at
-            # https://s3.amazonaws.com/{bucket}/{remote_filename}. But
-            # that may not be the URL we want to store.
-            mirror_url = self.final_mirror_url(bucket, remote_filename)
-            representation.set_as_mirrored(mirror_url)
-
-            source = representation.local_content_path
-            if representation.url != mirror_url:
-                source = representation.url
-            if source:
-                logging.info("MIRRORED %s => %s", source, representation.mirror_url)
-            else:
-                logging.info("MIRRORED %s", representation.mirror_url)
-        except (BotoCoreError, ClientError) as e:
-            # BotoCoreError happens when there's a problem with
-            # the network transport. ClientError happens when
-            # there's a problem with the credentials. Either way,
-            # the best thing to do is treat this as a transient
-            # error and try again later. There's no scenario where
-            # giving up is the right move.
-            logging.error("Error uploading %s: %r", mirror_to, e, exc_info=e)
-        finally:
-            fh.close()
-
-    @contextmanager
-    def multipart_upload(
-        self, representation, mirror_to, upload_class=MultipartS3Upload
-    ):
-        upload = upload_class(self, representation, mirror_to)
-        try:
-            yield upload
-            upload.complete()
-        except Exception as e:
-            logging.error("Multipart upload of %s failed: %r", mirror_to, e, exc_info=e)
-            upload.abort()
-            representation.mirror_exception = str(e)
-
-
-# MirrorUploader.implementation will instantiate an S3Uploader
-# for storage integrations with protocol 'Amazon S3'.
-MirrorUploader.IMPLEMENTATION_REGISTRY[S3Uploader.NAME] = S3Uploader
-
-
-class MinIOUploaderConfiguration(ConfigurationGrouping):
-    ENDPOINT_URL = "ENDPOINT_URL"
-
-    endpoint_url = ConfigurationMetadata(
-        key=ENDPOINT_URL,
-        label=_("Endpoint URL"),
-        description=_("MinIO's endpoint URL"),
-        type=ConfigurationAttributeType.TEXT,
-        required=True,
-    )
-
-
-class MinIOUploader(S3Uploader):
-    NAME = ExternalIntegration.MINIO
-
-    SETTINGS = S3Uploader.SETTINGS + [
-        MinIOUploaderConfiguration.endpoint_url.to_settings()
-    ]
-
-    def __init__(self, integration, client_class=None):
-        """Instantiate an S3Uploader from an ExternalIntegration.
-
-        :param integration: An ExternalIntegration
-
-        :param client_class: Mock object (or class) to use (or instantiate)
-            instead of boto3.client.
-        """
-        endpoint_url = integration.setting(
-            MinIOUploaderConfiguration.ENDPOINT_URL
-        ).value
-
-        _, host, _, _, _ = urlsplit(endpoint_url)
-
-        if not client_class:
-            client_class = boto3.client
-
-        if callable(client_class):
-            client_class = functools.partial(client_class, endpoint_url=endpoint_url)
-        else:
-            self.client = client_class
-
-        super().__init__(integration, client_class, host)
-
-
-# MirrorUploader.implementation will instantiate an MinIOUploader instance
-# for storage integrations with protocol 'MinIO'.
-MirrorUploader.IMPLEMENTATION_REGISTRY[MinIOUploader.NAME] = MinIOUploader
-
-
-class MockS3Uploader(S3Uploader):
-    """A dummy uploader for use in tests."""
-
-    buckets = {
-        S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "test-cover-bucket",
-        S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "test-content-bucket",
-        S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "test-content-bucket",
-        S3UploaderConfiguration.MARC_BUCKET_KEY: "test-marc-bucket",
-    }
-
-    def __init__(self, fail=False, *args, **kwargs):
-        self.uploaded = []
-        self.content = []
-        self.destinations = []
-        self.fail = fail
-        self._s3_region = S3UploaderConfiguration.S3_DEFAULT_REGION
-        self._s3_addressing_style = S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE
-        config = Config(
-            signature_version=botocore.UNSIGNED,
-            s3={"addressing_style": self._s3_addressing_style},
-        )
-        self._s3_link_client = boto3.client(
-            "s3",
-            region_name=self._s3_region,
-            aws_access_key_id=None,
-            aws_secret_access_key=None,
-            config=config,
-        )
-        self.client = boto3.client(
-            "s3",
-            region_name=self._s3_region,
-            aws_access_key_id=None,
-            aws_secret_access_key=None,
-        )
-
-        self.client
-
-    def mirror_one(self, representation, **kwargs):
-        mirror_to = kwargs["mirror_to"]
-        self.uploaded.append(representation)
-        self.destinations.append(mirror_to)
-        self.content.append(representation.content)
-        if self.fail:
-            representation.mirror_exception = "Exception"
-            representation.mirrored_at = None
-        else:
-            representation.set_as_mirrored(mirror_to)
-
-    @contextmanager
-    def multipart_upload(self, representation, mirror_to):
-        class MockMultipartS3Upload(MultipartS3Upload):
-            def __init__(self):
-                self.parts = []
-
-            def upload_part(self, part):
-                self.parts.append(part)
-
-        upload = MockMultipartS3Upload()
-        yield upload
-
-        self.uploaded.append(representation)
-        self.destinations.append(mirror_to)
-        self.content.append(upload.parts)
-        if self.fail:
-            representation.mirror_exception = "Exception"
-            representation.mirrored_at = None
-        else:
-            representation.set_as_mirrored(mirror_to)
-
-
-class MockS3Client:
-    """This pool lets us test the real S3Uploader class with a mocked-up
-    boto3 client.
- """ - - def __init__( - self, - service, - region_name, - aws_access_key_id, - aws_secret_access_key, - config=None, - ): - assert service == "s3" - self.region_name = region_name - self.access_key = aws_access_key_id - self.secret_key = aws_secret_access_key - self.config = config - self.uploads = [] - self.parts = [] - self.fail_with = None - - def upload_fileobj(self, Fileobj, Bucket, Key, ExtraArgs=None, **kwargs): - if self.fail_with: - raise self.fail_with - self.uploads.append((Fileobj.read(), Bucket, Key, ExtraArgs, kwargs)) - return None - - def create_multipart_upload(self, **kwargs): - if self.fail_with: - raise self.fail_with - return dict(UploadId=1) - - def upload_part(self, **kwargs): - if self.fail_with: - raise self.fail_with - self.parts.append(kwargs) - return dict(ETag="etag") - - def complete_multipart_upload(self, **kwargs): - self.uploads.append(kwargs) - self.parts = [] - return None - - def abort_multipart_upload(self, **kwargs): - self.parts = [] - return None - - def generate_presigned_url( - self, ClientMethod, Params=None, ExpiresIn=3600, HttpMethod=None - ): - return None diff --git a/core/scripts.py b/core/scripts.py index 328de301a3..62290e4575 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -58,6 +58,7 @@ from .monitor import CollectionMonitor, ReaperMonitor from .opds_import import OPDSImporter, OPDSImportMonitor from .overdrive import OverdriveCoreAPI +from .service.container import Services, container_instance from .util import fast_query_count from .util.datetime_helpers import strptime_utc, utc_now from .util.personal_names import contributor_name_match_ratio, display_name_to_sort_name @@ -71,6 +72,10 @@ def _db(self) -> Session: self._session = production_session() return self._session + @property + def services(self) -> Services: + return self._services + @property def script_name(self): """Find or guess the name of the script. @@ -110,7 +115,7 @@ def parse_time(cls, time_string): continue raise ValueError("Could not parse time: %s" % time_string) - def __init__(self, _db=None, *args, **kwargs): + def __init__(self, _db=None, services: Optional[Services] = None, *args, **kwargs): """Basic constructor. :_db: A database session to be used instead of @@ -119,6 +124,8 @@ def __init__(self, _db=None, *args, **kwargs): if _db: self._session = _db + self._services = container_instance() if services is None else services + def run(self): DataSource.well_known_sources(self._db) start_time = utc_now() diff --git a/core/service/configuration.py b/core/service/configuration.py new file mode 100644 index 0000000000..14321668e5 --- /dev/null +++ b/core/service/configuration.py @@ -0,0 +1,52 @@ +from pathlib import Path +from typing import Any + +from pydantic import BaseSettings, ValidationError + +from core.config import CannotLoadConfiguration + + +class ServiceConfiguration(BaseSettings): + """ + Base class for our service configuration. Each subclass should define its own + configuration settings as pydantic fields. The settings will be loaded from + environment variables with the prefix defined in the Config class. + + The env_prefix should also be overridden in subclasses to provide a unique prefix + for each service. 
+ """ + + class Config: + # See the pydantic docs for information on these settings + # https://docs.pydantic.dev/usage/model_config/ + + # Each sub-config will have its own prefix + env_prefix = "PALACE_" + + # Strip whitespace from all strings + anystr_strip_whitespace = True + + # Forbid mutation, settings should be loaded once from environment. + allow_mutation = False + + # Allow env vars to be loaded from a .env file + # This loads the .env file from the root of the project + env_file = str(Path(__file__).parent.parent.parent.absolute() / ".env") + + # Nested settings will be loaded from environment variables with this delimiter. + env_nested_delimiter = "__" + + def __init__(self, *args: Any, **kwargs: Any): + try: + super().__init__(*args, **kwargs) + except ValidationError as error_exception: + # The services settings failed to validate, we capture the ValidationError and + # raise a more specific CannotLoadConfiguration error. + errors = error_exception.errors() + error_log_message = f"Error loading settings from environment:" + for error in errors: + delimiter = self.__config__.env_nested_delimiter or "__" + error_location = delimiter.join(str(e).upper() for e in error["loc"]) + env_var_name = f"{self.__config__.env_prefix}{error_location}" + error_log_message += f"\n {env_var_name}: {error['msg']}" + raise CannotLoadConfiguration(error_log_message) from error_exception diff --git a/core/service/container.py b/core/service/container.py new file mode 100644 index 0000000000..b204df6462 --- /dev/null +++ b/core/service/container.py @@ -0,0 +1,37 @@ +from dependency_injector import providers +from dependency_injector.containers import DeclarativeContainer + +from core.service.storage.configuration import StorageConfiguration +from core.service.storage.container import Storage + + +class Services(DeclarativeContainer): + + config = providers.Configuration() + + storage = providers.Container( + Storage, + config=config.storage, + ) + + +def create_container() -> Services: + container = Services() + container.config.from_dict({"storage": StorageConfiguration().dict()}) + return container + + +_container_instance = None + + +def container_instance() -> Services: + # Create a singleton container instance, I'd like this to be used sparingly + # and eventually have it go away, but there are places in the code that + # are currently difficult to refactor to pass the container into the + # constructor. + # If at all possible please use the container that is stored in the CirculationManager + # or Scripts classes instead of using this function. 
diff --git a/core/service/container.py b/core/service/container.py
new file mode 100644
index 0000000000..b204df6462
--- /dev/null
+++ b/core/service/container.py
@@ -0,0 +1,37 @@
+from dependency_injector import providers
+from dependency_injector.containers import DeclarativeContainer
+
+from core.service.storage.configuration import StorageConfiguration
+from core.service.storage.container import Storage
+
+
+class Services(DeclarativeContainer):
+
+    config = providers.Configuration()
+
+    storage = providers.Container(
+        Storage,
+        config=config.storage,
+    )
+
+
+def create_container() -> Services:
+    container = Services()
+    container.config.from_dict({"storage": StorageConfiguration().dict()})
+    return container
+
+
+_container_instance = None
+
+
+def container_instance() -> Services:
+    # Create a singleton container instance. I'd like this to be used sparingly,
+    # and eventually have it go away, but there are places in the code that
+    # are currently difficult to refactor to pass the container into the
+    # constructor.
+    # If at all possible, please use the container that is stored in the
+    # CirculationManager or Scripts classes instead of using this function.
+    global _container_instance
+    if _container_instance is None:
+        _container_instance = create_container()
+    return _container_instance
diff --git a/core/service/storage/configuration.py b/core/service/storage/configuration.py
new file mode 100644
index 0000000000..6e9b51f052
--- /dev/null
+++ b/core/service/storage/configuration.py
@@ -0,0 +1,38 @@
+from typing import Optional
+
+import boto3
+from pydantic import AnyHttpUrl, parse_obj_as, validator
+
+from core.service.configuration import ServiceConfiguration
+
+
+class StorageConfiguration(ServiceConfiguration):
+    region: Optional[str] = None
+    access_key: Optional[str] = None
+    secret_key: Optional[str] = None
+
+    public_access_bucket: Optional[str] = None
+    analytics_bucket: Optional[str] = None
+
+    endpoint_url: Optional[AnyHttpUrl] = None
+
+    url_template: AnyHttpUrl = parse_obj_as(
+        AnyHttpUrl, "https://{bucket}.s3.{region}.amazonaws.com/{key}"
+    )
+
+    @validator("region")
+    def validate_region(cls, v: Optional[str]) -> Optional[str]:
+        # No validation if region is not provided.
+        if v is None:
+            return None
+
+        session = boto3.session.Session()
+        regions = session.get_available_regions(service_name="s3")
+        if v not in regions:
+            raise ValueError(
+                f"Invalid region: {v}. Region must be one of: {', '.join(regions)}."
+            )
+        return v
+
+    class Config:
+        env_prefix = "PALACE_STORAGE_"
diff --git a/core/service/storage/container.py b/core/service/storage/container.py
new file mode 100644
index 0000000000..54cf2db835
--- /dev/null
+++ b/core/service/storage/container.py
@@ -0,0 +1,34 @@
+import boto3
+from dependency_injector import providers
+from dependency_injector.containers import DeclarativeContainer
+
+from core.service.storage.s3 import S3Service
+
+
+class Storage(DeclarativeContainer):
+    config = providers.Configuration()
+
+    s3_client = providers.Singleton(
+        boto3.client,
+        service_name="s3",
+        aws_access_key_id=config.access_key,
+        aws_secret_access_key=config.secret_key,
+        region_name=config.region,
+        endpoint_url=config.endpoint_url,
+    )
+
+    analytics = providers.Singleton(
+        S3Service.factory,
+        client=s3_client,
+        region=config.region,
+        bucket=config.analytics_bucket,
+        url_template=config.url_template,
+    )
+
+    public = providers.Singleton(
+        S3Service.factory,
+        client=s3_client,
+        region=config.region,
+        bucket=config.public_access_bucket,
+        url_template=config.url_template,
+    )
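Taken together, the three files above mean storage is configured once from the environment and then resolved through the container. A usage sketch (the error handling is illustrative):

```python
from core.service.container import create_container

# Builds the Services container; StorageConfiguration reads the
# PALACE_STORAGE_* environment variables (region, keys, buckets, etc.).
services = create_container()

# Providers are singletons: repeated calls return the same instance.
analytics_storage = services.storage.analytics()
public_storage = services.storage.public()

# S3Service.factory returns None when the matching bucket variable
# (PALACE_STORAGE_ANALYTICS_BUCKET or PALACE_STORAGE_PUBLIC_ACCESS_BUCKET)
# is unset, so callers have to handle an unconfigured service.
if analytics_storage is None:
    raise RuntimeError("Analytics storage is not configured.")
```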
diff --git a/core/service/storage/s3.py b/core/service/storage/s3.py
new file mode 100644
index 0000000000..e73c428004
--- /dev/null
+++ b/core/service/storage/s3.py
@@ -0,0 +1,232 @@
+from __future__ import annotations
+
+import dataclasses
+import logging
+import sys
+from io import BytesIO
+from string import Formatter
+from types import TracebackType
+from typing import TYPE_CHECKING, BinaryIO, List, Optional, Type
+from urllib.parse import quote
+
+from botocore.exceptions import BotoCoreError, ClientError
+
+from core.config import CannotLoadConfiguration
+
+if sys.version_info >= (3, 11):
+    from typing import Self
+else:
+    from typing_extensions import Self
+
+if TYPE_CHECKING:
+    from mypy_boto3_s3 import S3Client
+    from mypy_boto3_s3.type_defs import CreateMultipartUploadOutputTypeDef
+
+
+@dataclasses.dataclass
+class MultipartS3UploadPart:
+    ETag: str
+    PartNumber: int
+
+
+class MultipartS3ContextManager:
+    def __init__(
+        self,
+        client: S3Client,
+        bucket: str,
+        key: str,
+        url: str,
+        media_type: Optional[str] = None,
+    ) -> None:
+        self.client = client
+        self.key = key
+        self.bucket = bucket
+        self.part_number = 1
+        self.parts: List[MultipartS3UploadPart] = []
+        self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}")
+        self.media_type = media_type
+        self.upload: Optional[CreateMultipartUploadOutputTypeDef] = None
+        self.upload_id: Optional[str] = None
+        self._complete = False
+        self._url = url
+        self._exception: Optional[BaseException] = None
+
+    def __enter__(self) -> Self:
+        params = {
+            "Bucket": self.bucket,
+            "Key": self.key,
+        }
+        if self.media_type is not None:
+            params["ContentType"] = self.media_type
+        self.upload = self.client.create_multipart_upload(**params)  # type: ignore[arg-type]
+        self.upload_id = self.upload["UploadId"]
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> bool:
+        if exc_val is None:
+            self._upload_complete()
+        else:
+            self.log.debug(
+                f"Exception {exc_type} occurred during upload of {self.key}. Aborting.",
+                exc_info=exc_val,
+            )
+            self._upload_abort()
+            self._exception = exc_val
+            if isinstance(exc_val, (ClientError, BotoCoreError)):
+                return True
+        return False
+
+    def upload_part(self, content: bytes) -> None:
+        if self.complete or self.exception or self.upload_id is None:
+            raise RuntimeError("Upload already complete or aborted.")
+
+        logging.info(
+            f"Uploading part {self.part_number} of {self.key} to {self.bucket}"
+        )
+        result = self.client.upload_part(
+            Body=content,
+            Bucket=self.bucket,
+            Key=self.key,
+            PartNumber=self.part_number,
+            UploadId=self.upload_id,
+        )
+        self.parts.append(MultipartS3UploadPart(result["ETag"], self.part_number))
+        self.part_number += 1
+
+    def _upload_complete(self) -> None:
+        if not self.parts:
+            logging.info(f"Upload of {self.key} was empty.")
+            self._upload_abort()
+        elif self.upload_id is None:
+            raise RuntimeError("Upload ID not set.")
+        else:
+            self.client.complete_multipart_upload(
+                Bucket=self.bucket,
+                Key=self.key,
+                UploadId=self.upload_id,
+                MultipartUpload=dict(Parts=[dataclasses.asdict(part) for part in self.parts]),  # type: ignore[misc]
+            )
+            self._complete = True
+
+    def _upload_abort(self) -> None:
+        logging.info(f"Aborting upload of {self.key}.")
+        if self.upload_id is not None:
+            self.client.abort_multipart_upload(
+                Bucket=self.bucket,
+                Key=self.key,
+                UploadId=self.upload_id,
+            )
+        else:
+            logging.error("Upload ID not set, unable to abort.")
+
+    @property
+    def url(self) -> str:
+        return self._url
+
+    @property
+    def complete(self) -> bool:
+        return self._complete
+
+    @property
+    def exception(self) -> Optional[BaseException]:
+        return self._exception
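`MultipartS3ContextManager` wraps boto3's create/upload_part/complete/abort multipart calls in a with-block: a clean exit completes the upload, an exception aborts it, and S3 client errors are swallowed and surfaced via `exception` rather than raised. A usage sketch, via the `S3Service.multipart` helper defined just below (the key, chunks, and media type are illustrative):

```python
# `storage` is an S3Service instance, e.g. services.storage.public().
with storage.multipart("exports/large-file.mrc", content_type="application/marc") as upload:
    for chunk in [b"part one ...", b"part two ..."]:
        # Note: non-final S3 multipart parts must be at least 5 MB;
        # these tiny chunks are only for illustration.
        upload.upload_part(chunk)

if upload.complete:
    print(f"Uploaded to {upload.url}")
else:
    # A BotoCoreError/ClientError was caught and recorded, not raised.
    print(f"Upload failed: {upload.exception}")
```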
+
+
+class S3Service:
+    def __init__(
+        self,
+        client: S3Client,
+        region: Optional[str],
+        bucket: str,
+        url_template: str,
+    ) -> None:
+        self.client = client
+        self.region = region
+        self.bucket = bucket
+        self.url_template = url_template
+        self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}")
+
+        # Validate the URL template.
+        formatter = Formatter()
+        field_tuple = formatter.parse(self.url_template)
+        field_names = [field[1] for field in field_tuple]
+        if "region" in field_names and self.region is None:
+            raise CannotLoadConfiguration(
+                "URL template requires a region, but no region was provided."
+            )
+        if "key" not in field_names:
+            raise CannotLoadConfiguration(
+                "URL template requires a key, but no key was provided."
+            )
+
+    @classmethod
+    def factory(
+        cls,
+        client: S3Client,
+        region: Optional[str],
+        bucket: Optional[str],
+        url_template: str,
+    ) -> Optional[Self]:
+        if bucket is None:
+            return None
+        return cls(client, region, bucket, url_template)
+
+    def generate_url(self, key: str) -> str:
+        return self.url_template.format(
+            bucket=self.bucket, key=quote(key), region=self.region
+        )
+
+    def store(
+        self,
+        key: str,
+        content: str | bytes,
+        content_type: Optional[str] = None,
+    ) -> Optional[str]:
+        if isinstance(content, str):
+            content = content.encode("utf8")
+        return self.store_stream(
+            key=key, stream=BytesIO(content), content_type=content_type
+        )
+
+    def store_stream(
+        self,
+        key: str,
+        stream: BinaryIO,
+        content_type: Optional[str] = None,
+    ) -> Optional[str]:
+        try:
+            extra_args = {} if content_type is None else {"ContentType": content_type}
+            self.client.upload_fileobj(
+                Fileobj=stream,
+                Bucket=self.bucket,
+                Key=key,
+                ExtraArgs=extra_args,
+            )
+        except (BotoCoreError, ClientError) as e:
+            # BotoCoreError happens when there's a problem with
+            # the network transport. ClientError happens when
+            # there's a problem with the credentials. Either way,
+            # the best thing to do is treat this as a transient
+            # error and try again later. There's no scenario where
+            # giving up is the right move.
+            self.log.exception(f"Error uploading {key}: {str(e)}")
+            return None
+        finally:
+            stream.close()
+
+        url = self.generate_url(key)
+        self.log.info(f"Stored '{key}' to {url}.")
+        return url
+
+    def multipart(
+        self, key: str, content_type: Optional[str] = None
+    ) -> MultipartS3ContextManager:
+        url = self.generate_url(key)
+        return MultipartS3ContextManager(
+            self.client, self.bucket, key, url, content_type
+        )
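With the whole service in view, here is a short sketch of the happy path (the key names and content are illustrative): resolving the service from the container, storing a small document, and generating a URL.

```python
from core.service.container import container_instance

# Resolve the public storage service; None means no bucket is configured.
storage = container_instance().storage.public()

if storage is not None:
    # store() encodes str content as UTF-8 and delegates to store_stream().
    # On success it returns the file's URL; on a Boto error it logs the
    # exception and returns None instead of raising.
    url = storage.store("reports/example.json", '{"status": "ok"}', "application/json")
    print(url)

    # URLs are built from the url_template setting; keys are percent-quoted.
    print(storage.generate_url("path with spaces.txt"))
```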
diff --git a/docker-compose.yml b/docker-compose.yml
index 68f6f12565..a9996f8b7c 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,4 +1,16 @@
 version: "3.9"
+
+# Common set of CM environment variables
+# see: https://github.com/compose-spec/compose-spec/blob/master/spec.md#extension
+x-cm-env-variables: &cm-env-variables
+  SIMPLIFIED_PRODUCTION_DATABASE: "postgresql://palace:test@pg:5432/circ"
+  PALACE_STORAGE_ACCESS_KEY: "palace"
+  PALACE_STORAGE_SECRET_KEY: "test123456789"
+  PALACE_STORAGE_ENDPOINT_URL: "http://minio:9000"
+  PALACE_STORAGE_PUBLIC_ACCESS_BUCKET: "public"
+  PALACE_STORAGE_ANALYTICS_BUCKET: "analytics"
+  PALACE_STORAGE_URL_TEMPLATE: "http://localhost:9000/{bucket}/{key}"
+
 services:
 
   # example docker compose configuration for testing and development
@@ -9,16 +21,14 @@ services:
       target: webapp
     ports:
       - "6500:80"
-    environment:
-      SIMPLIFIED_PRODUCTION_DATABASE: "postgresql://palace:test@pg:5432/circ"
+    environment: *cm-env-variables
 
   scripts:
     build:
       context: .
       dockerfile: docker/Dockerfile
       target: scripts
-    environment:
-      SIMPLIFIED_PRODUCTION_DATABASE: "postgresql://palace:test@pg:5432/circ"
+    environment: *cm-env-variables
 
   pg:
     image: "postgres:12"
@@ -36,6 +46,7 @@ services:
       MINIO_ROOT_USER: "palace"
       MINIO_ROOT_PASSWORD: "test123456789"
       MINIO_SCHEME: "http"
+      MINIO_DEFAULT_BUCKETS: "public:download,analytics"
 
   os:
     build:
diff --git a/docker/startup/01_set_simplified_environment.sh b/docker/startup/01_set_simplified_environment.sh
index d26a0a8db8..709656bc56 100755
--- a/docker/startup/01_set_simplified_environment.sh
+++ b/docker/startup/01_set_simplified_environment.sh
@@ -14,7 +14,7 @@ touch $SIMPLIFIED_ENVIRONMENT
 # into an environment file. This will allow the environment to be loaded when
 # cron tasks are run, since crontab doesn't load them automatically.
 # The values of the variables are escaped as needed for the shell.
-for var in $(printenv | grep -e SIMPLIFIED -e LIBSIMPLE | sed -e 's/^\([^=]*\)=.*$/\1/g'); do {
+for var in $(printenv | grep -e SIMPLIFIED -e LIBSIMPLE -e PALACE | sed -e 's/^\([^=]*\)=.*$/\1/g'); do {
     printf "export ${var}=%q\n" $(printenv "${var}")
 } done > $SIMPLIFIED_ENVIRONMENT
diff --git a/poetry.lock b/poetry.lock
index 4c3ea44631..9c4966fc70 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -133,412 +133,426 @@ files = [
 [[package]]
 name = "boto3"
-version = "1.18.65"
+version = "1.28.52"
 description = "The AWS SDK for Python"
 optional = false
-python-versions = ">= 3.6"
+python-versions = ">= 3.7"
 files = [
-    {file = "boto3-1.18.65-py3-none-any.whl", hash = "sha256:bbbc3a71949af31c33101ee0daf4db9b11148d67a4e574b6c66cbe35d985b5af"},
-    {file = "boto3-1.18.65.tar.gz", hash = "sha256:baedf0637dd0e47cff60eb5591133f9c10aeb49581e2ad5a99794996a2dfbe09"},
+    {file = "boto3-1.28.52-py3-none-any.whl", hash = "sha256:1d36db102517d62c6968b3b0636303241f56859d12dd071def4882fc6e030b20"},
+    {file = "boto3-1.28.52.tar.gz", hash = "sha256:a34fc153cb2f6fb2f79a764286c967392e8aae9412381d943bddc576c4f7631a"},
 ]
 
 [package.dependencies]
-botocore = ">=1.21.65,<1.22.0"
-jmespath = ">=0.7.1,<1.0.0"
-s3transfer = ">=0.5.0,<0.6.0"
+botocore = ">=1.31.52,<1.32.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.6.0,<0.7.0"
 
 [package.extras]
 crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "boto3-stubs"
-version = "1.27.0"
-description = "Type annotations for boto3 1.27.0 generated with mypy-boto3-builder 7.14.5"
+version = "1.28.52"
+description = "Type annotations for boto3 1.28.52 generated with mypy-boto3-builder 7.19.0"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "boto3-stubs-1.27.0.tar.gz", hash = "sha256:55b094ebbefecb0b8015451707aafeb81c2313e216dbbd5e2f2efff70a02db63"},
-    {file = "boto3_stubs-1.27.0-py3-none-any.whl", hash = "sha256:451749fc2bb0af5718bf1410473ec2e7f915bb860614cd0f6aca00c254ccf7e3"},
+    {file = "boto3-stubs-1.28.52.tar.gz", hash = "sha256:12d7e5865aeec52e1f73b935b1c6a42e61325538fc2cb83a87a83e41e9485241"},
+    {file = "boto3_stubs-1.28.52-py3-none-any.whl", hash = "sha256:3ea81a225e062f3bcb205467891086ea031519697ad54622e61251b52609b8d6"},
 ]
 
 [package.dependencies]
+boto3 = {version = "1.28.52", optional = true, markers = "extra == \"boto3\""}
+botocore = {version = "1.31.52", optional = true, markers = "extra == \"boto3\""}
 botocore-stubs = "*"
+mypy-boto3-cloudformation = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""}
+mypy-boto3-dynamodb = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""}
+mypy-boto3-ec2 = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""}
+mypy-boto3-lambda = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""}
+mypy-boto3-rds = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""}
+mypy-boto3-s3 = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""}
+mypy-boto3-sqs = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""}
 types-s3transfer = "*"
-typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""}
+typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""}
 
 [package.extras]
-accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.27.0,<1.28.0)"] -account = ["mypy-boto3-account (>=1.27.0,<1.28.0)"] -acm = ["mypy-boto3-acm (>=1.27.0,<1.28.0)"] -acm-pca = ["mypy-boto3-acm-pca (>=1.27.0,<1.28.0)"] -alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.27.0,<1.28.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.27.0,<1.28.0)", "mypy-boto3-account (>=1.27.0,<1.28.0)", "mypy-boto3-acm (>=1.27.0,<1.28.0)", "mypy-boto3-acm-pca (>=1.27.0,<1.28.0)", "mypy-boto3-alexaforbusiness (>=1.27.0,<1.28.0)", "mypy-boto3-amp (>=1.27.0,<1.28.0)", "mypy-boto3-amplify (>=1.27.0,<1.28.0)", "mypy-boto3-amplifybackend (>=1.27.0,<1.28.0)", "mypy-boto3-amplifyuibuilder (>=1.27.0,<1.28.0)", "mypy-boto3-apigateway (>=1.27.0,<1.28.0)", "mypy-boto3-apigatewaymanagementapi (>=1.27.0,<1.28.0)", "mypy-boto3-apigatewayv2 (>=1.27.0,<1.28.0)", "mypy-boto3-appconfig (>=1.27.0,<1.28.0)", "mypy-boto3-appconfigdata (>=1.27.0,<1.28.0)", "mypy-boto3-appfabric (>=1.27.0,<1.28.0)", "mypy-boto3-appflow (>=1.27.0,<1.28.0)", "mypy-boto3-appintegrations (>=1.27.0,<1.28.0)", "mypy-boto3-application-autoscaling (>=1.27.0,<1.28.0)", "mypy-boto3-application-insights (>=1.27.0,<1.28.0)", "mypy-boto3-applicationcostprofiler (>=1.27.0,<1.28.0)", "mypy-boto3-appmesh (>=1.27.0,<1.28.0)", "mypy-boto3-apprunner (>=1.27.0,<1.28.0)", "mypy-boto3-appstream (>=1.27.0,<1.28.0)", "mypy-boto3-appsync (>=1.27.0,<1.28.0)", "mypy-boto3-arc-zonal-shift (>=1.27.0,<1.28.0)", "mypy-boto3-athena (>=1.27.0,<1.28.0)", "mypy-boto3-auditmanager (>=1.27.0,<1.28.0)", "mypy-boto3-autoscaling (>=1.27.0,<1.28.0)", "mypy-boto3-autoscaling-plans (>=1.27.0,<1.28.0)", "mypy-boto3-backup (>=1.27.0,<1.28.0)", "mypy-boto3-backup-gateway (>=1.27.0,<1.28.0)", "mypy-boto3-backupstorage (>=1.27.0,<1.28.0)", "mypy-boto3-batch (>=1.27.0,<1.28.0)", "mypy-boto3-billingconductor (>=1.27.0,<1.28.0)", "mypy-boto3-braket (>=1.27.0,<1.28.0)", "mypy-boto3-budgets (>=1.27.0,<1.28.0)", "mypy-boto3-ce (>=1.27.0,<1.28.0)", "mypy-boto3-chime (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-identity (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-meetings (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-messaging (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-voice (>=1.27.0,<1.28.0)", "mypy-boto3-cleanrooms (>=1.27.0,<1.28.0)", "mypy-boto3-cloud9 (>=1.27.0,<1.28.0)", "mypy-boto3-cloudcontrol (>=1.27.0,<1.28.0)", "mypy-boto3-clouddirectory (>=1.27.0,<1.28.0)", "mypy-boto3-cloudformation (>=1.27.0,<1.28.0)", "mypy-boto3-cloudfront (>=1.27.0,<1.28.0)", "mypy-boto3-cloudhsm (>=1.27.0,<1.28.0)", "mypy-boto3-cloudhsmv2 (>=1.27.0,<1.28.0)", "mypy-boto3-cloudsearch (>=1.27.0,<1.28.0)", "mypy-boto3-cloudsearchdomain (>=1.27.0,<1.28.0)", "mypy-boto3-cloudtrail (>=1.27.0,<1.28.0)", "mypy-boto3-cloudtrail-data (>=1.27.0,<1.28.0)", "mypy-boto3-cloudwatch (>=1.27.0,<1.28.0)", "mypy-boto3-codeartifact (>=1.27.0,<1.28.0)", "mypy-boto3-codebuild (>=1.27.0,<1.28.0)", "mypy-boto3-codecatalyst (>=1.27.0,<1.28.0)", "mypy-boto3-codecommit (>=1.27.0,<1.28.0)", "mypy-boto3-codedeploy (>=1.27.0,<1.28.0)", "mypy-boto3-codeguru-reviewer (>=1.27.0,<1.28.0)", "mypy-boto3-codeguru-security (>=1.27.0,<1.28.0)", "mypy-boto3-codeguruprofiler (>=1.27.0,<1.28.0)", "mypy-boto3-codepipeline (>=1.27.0,<1.28.0)", "mypy-boto3-codestar (>=1.27.0,<1.28.0)", "mypy-boto3-codestar-connections (>=1.27.0,<1.28.0)", "mypy-boto3-codestar-notifications (>=1.27.0,<1.28.0)", "mypy-boto3-cognito-identity (>=1.27.0,<1.28.0)", "mypy-boto3-cognito-idp (>=1.27.0,<1.28.0)", "mypy-boto3-cognito-sync 
(>=1.27.0,<1.28.0)", "mypy-boto3-comprehend (>=1.27.0,<1.28.0)", "mypy-boto3-comprehendmedical (>=1.27.0,<1.28.0)", "mypy-boto3-compute-optimizer (>=1.27.0,<1.28.0)", "mypy-boto3-config (>=1.27.0,<1.28.0)", "mypy-boto3-connect (>=1.27.0,<1.28.0)", "mypy-boto3-connect-contact-lens (>=1.27.0,<1.28.0)", "mypy-boto3-connectcampaigns (>=1.27.0,<1.28.0)", "mypy-boto3-connectcases (>=1.27.0,<1.28.0)", "mypy-boto3-connectparticipant (>=1.27.0,<1.28.0)", "mypy-boto3-controltower (>=1.27.0,<1.28.0)", "mypy-boto3-cur (>=1.27.0,<1.28.0)", "mypy-boto3-customer-profiles (>=1.27.0,<1.28.0)", "mypy-boto3-databrew (>=1.27.0,<1.28.0)", "mypy-boto3-dataexchange (>=1.27.0,<1.28.0)", "mypy-boto3-datapipeline (>=1.27.0,<1.28.0)", "mypy-boto3-datasync (>=1.27.0,<1.28.0)", "mypy-boto3-dax (>=1.27.0,<1.28.0)", "mypy-boto3-detective (>=1.27.0,<1.28.0)", "mypy-boto3-devicefarm (>=1.27.0,<1.28.0)", "mypy-boto3-devops-guru (>=1.27.0,<1.28.0)", "mypy-boto3-directconnect (>=1.27.0,<1.28.0)", "mypy-boto3-discovery (>=1.27.0,<1.28.0)", "mypy-boto3-dlm (>=1.27.0,<1.28.0)", "mypy-boto3-dms (>=1.27.0,<1.28.0)", "mypy-boto3-docdb (>=1.27.0,<1.28.0)", "mypy-boto3-docdb-elastic (>=1.27.0,<1.28.0)", "mypy-boto3-drs (>=1.27.0,<1.28.0)", "mypy-boto3-ds (>=1.27.0,<1.28.0)", "mypy-boto3-dynamodb (>=1.27.0,<1.28.0)", "mypy-boto3-dynamodbstreams (>=1.27.0,<1.28.0)", "mypy-boto3-ebs (>=1.27.0,<1.28.0)", "mypy-boto3-ec2 (>=1.27.0,<1.28.0)", "mypy-boto3-ec2-instance-connect (>=1.27.0,<1.28.0)", "mypy-boto3-ecr (>=1.27.0,<1.28.0)", "mypy-boto3-ecr-public (>=1.27.0,<1.28.0)", "mypy-boto3-ecs (>=1.27.0,<1.28.0)", "mypy-boto3-efs (>=1.27.0,<1.28.0)", "mypy-boto3-eks (>=1.27.0,<1.28.0)", "mypy-boto3-elastic-inference (>=1.27.0,<1.28.0)", "mypy-boto3-elasticache (>=1.27.0,<1.28.0)", "mypy-boto3-elasticbeanstalk (>=1.27.0,<1.28.0)", "mypy-boto3-elastictranscoder (>=1.27.0,<1.28.0)", "mypy-boto3-elb (>=1.27.0,<1.28.0)", "mypy-boto3-elbv2 (>=1.27.0,<1.28.0)", "mypy-boto3-emr (>=1.27.0,<1.28.0)", "mypy-boto3-emr-containers (>=1.27.0,<1.28.0)", "mypy-boto3-emr-serverless (>=1.27.0,<1.28.0)", "mypy-boto3-es (>=1.27.0,<1.28.0)", "mypy-boto3-events (>=1.27.0,<1.28.0)", "mypy-boto3-evidently (>=1.27.0,<1.28.0)", "mypy-boto3-finspace (>=1.27.0,<1.28.0)", "mypy-boto3-finspace-data (>=1.27.0,<1.28.0)", "mypy-boto3-firehose (>=1.27.0,<1.28.0)", "mypy-boto3-fis (>=1.27.0,<1.28.0)", "mypy-boto3-fms (>=1.27.0,<1.28.0)", "mypy-boto3-forecast (>=1.27.0,<1.28.0)", "mypy-boto3-forecastquery (>=1.27.0,<1.28.0)", "mypy-boto3-frauddetector (>=1.27.0,<1.28.0)", "mypy-boto3-fsx (>=1.27.0,<1.28.0)", "mypy-boto3-gamelift (>=1.27.0,<1.28.0)", "mypy-boto3-gamesparks (>=1.27.0,<1.28.0)", "mypy-boto3-glacier (>=1.27.0,<1.28.0)", "mypy-boto3-globalaccelerator (>=1.27.0,<1.28.0)", "mypy-boto3-glue (>=1.27.0,<1.28.0)", "mypy-boto3-grafana (>=1.27.0,<1.28.0)", "mypy-boto3-greengrass (>=1.27.0,<1.28.0)", "mypy-boto3-greengrassv2 (>=1.27.0,<1.28.0)", "mypy-boto3-groundstation (>=1.27.0,<1.28.0)", "mypy-boto3-guardduty (>=1.27.0,<1.28.0)", "mypy-boto3-health (>=1.27.0,<1.28.0)", "mypy-boto3-healthlake (>=1.27.0,<1.28.0)", "mypy-boto3-honeycode (>=1.27.0,<1.28.0)", "mypy-boto3-iam (>=1.27.0,<1.28.0)", "mypy-boto3-identitystore (>=1.27.0,<1.28.0)", "mypy-boto3-imagebuilder (>=1.27.0,<1.28.0)", "mypy-boto3-importexport (>=1.27.0,<1.28.0)", "mypy-boto3-inspector (>=1.27.0,<1.28.0)", "mypy-boto3-inspector2 (>=1.27.0,<1.28.0)", "mypy-boto3-internetmonitor (>=1.27.0,<1.28.0)", "mypy-boto3-iot (>=1.27.0,<1.28.0)", "mypy-boto3-iot-data (>=1.27.0,<1.28.0)", "mypy-boto3-iot-jobs-data 
(>=1.27.0,<1.28.0)", "mypy-boto3-iot-roborunner (>=1.27.0,<1.28.0)", "mypy-boto3-iot1click-devices (>=1.27.0,<1.28.0)", "mypy-boto3-iot1click-projects (>=1.27.0,<1.28.0)", "mypy-boto3-iotanalytics (>=1.27.0,<1.28.0)", "mypy-boto3-iotdeviceadvisor (>=1.27.0,<1.28.0)", "mypy-boto3-iotevents (>=1.27.0,<1.28.0)", "mypy-boto3-iotevents-data (>=1.27.0,<1.28.0)", "mypy-boto3-iotfleethub (>=1.27.0,<1.28.0)", "mypy-boto3-iotfleetwise (>=1.27.0,<1.28.0)", "mypy-boto3-iotsecuretunneling (>=1.27.0,<1.28.0)", "mypy-boto3-iotsitewise (>=1.27.0,<1.28.0)", "mypy-boto3-iotthingsgraph (>=1.27.0,<1.28.0)", "mypy-boto3-iottwinmaker (>=1.27.0,<1.28.0)", "mypy-boto3-iotwireless (>=1.27.0,<1.28.0)", "mypy-boto3-ivs (>=1.27.0,<1.28.0)", "mypy-boto3-ivs-realtime (>=1.27.0,<1.28.0)", "mypy-boto3-ivschat (>=1.27.0,<1.28.0)", "mypy-boto3-kafka (>=1.27.0,<1.28.0)", "mypy-boto3-kafkaconnect (>=1.27.0,<1.28.0)", "mypy-boto3-kendra (>=1.27.0,<1.28.0)", "mypy-boto3-kendra-ranking (>=1.27.0,<1.28.0)", "mypy-boto3-keyspaces (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis-video-archived-media (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis-video-media (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis-video-signaling (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.27.0,<1.28.0)", "mypy-boto3-kinesisanalytics (>=1.27.0,<1.28.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.27.0,<1.28.0)", "mypy-boto3-kinesisvideo (>=1.27.0,<1.28.0)", "mypy-boto3-kms (>=1.27.0,<1.28.0)", "mypy-boto3-lakeformation (>=1.27.0,<1.28.0)", "mypy-boto3-lambda (>=1.27.0,<1.28.0)", "mypy-boto3-lex-models (>=1.27.0,<1.28.0)", "mypy-boto3-lex-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-lexv2-models (>=1.27.0,<1.28.0)", "mypy-boto3-lexv2-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-license-manager (>=1.27.0,<1.28.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.27.0,<1.28.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.27.0,<1.28.0)", "mypy-boto3-lightsail (>=1.27.0,<1.28.0)", "mypy-boto3-location (>=1.27.0,<1.28.0)", "mypy-boto3-logs (>=1.27.0,<1.28.0)", "mypy-boto3-lookoutequipment (>=1.27.0,<1.28.0)", "mypy-boto3-lookoutmetrics (>=1.27.0,<1.28.0)", "mypy-boto3-lookoutvision (>=1.27.0,<1.28.0)", "mypy-boto3-m2 (>=1.27.0,<1.28.0)", "mypy-boto3-machinelearning (>=1.27.0,<1.28.0)", "mypy-boto3-macie (>=1.27.0,<1.28.0)", "mypy-boto3-macie2 (>=1.27.0,<1.28.0)", "mypy-boto3-managedblockchain (>=1.27.0,<1.28.0)", "mypy-boto3-marketplace-catalog (>=1.27.0,<1.28.0)", "mypy-boto3-marketplace-entitlement (>=1.27.0,<1.28.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.27.0,<1.28.0)", "mypy-boto3-mediaconnect (>=1.27.0,<1.28.0)", "mypy-boto3-mediaconvert (>=1.27.0,<1.28.0)", "mypy-boto3-medialive (>=1.27.0,<1.28.0)", "mypy-boto3-mediapackage (>=1.27.0,<1.28.0)", "mypy-boto3-mediapackage-vod (>=1.27.0,<1.28.0)", "mypy-boto3-mediapackagev2 (>=1.27.0,<1.28.0)", "mypy-boto3-mediastore (>=1.27.0,<1.28.0)", "mypy-boto3-mediastore-data (>=1.27.0,<1.28.0)", "mypy-boto3-mediatailor (>=1.27.0,<1.28.0)", "mypy-boto3-memorydb (>=1.27.0,<1.28.0)", "mypy-boto3-meteringmarketplace (>=1.27.0,<1.28.0)", "mypy-boto3-mgh (>=1.27.0,<1.28.0)", "mypy-boto3-mgn (>=1.27.0,<1.28.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.27.0,<1.28.0)", "mypy-boto3-migrationhub-config (>=1.27.0,<1.28.0)", "mypy-boto3-migrationhuborchestrator (>=1.27.0,<1.28.0)", "mypy-boto3-migrationhubstrategy (>=1.27.0,<1.28.0)", "mypy-boto3-mobile (>=1.27.0,<1.28.0)", "mypy-boto3-mq (>=1.27.0,<1.28.0)", "mypy-boto3-mturk (>=1.27.0,<1.28.0)", "mypy-boto3-mwaa 
(>=1.27.0,<1.28.0)", "mypy-boto3-neptune (>=1.27.0,<1.28.0)", "mypy-boto3-network-firewall (>=1.27.0,<1.28.0)", "mypy-boto3-networkmanager (>=1.27.0,<1.28.0)", "mypy-boto3-nimble (>=1.27.0,<1.28.0)", "mypy-boto3-oam (>=1.27.0,<1.28.0)", "mypy-boto3-omics (>=1.27.0,<1.28.0)", "mypy-boto3-opensearch (>=1.27.0,<1.28.0)", "mypy-boto3-opensearchserverless (>=1.27.0,<1.28.0)", "mypy-boto3-opsworks (>=1.27.0,<1.28.0)", "mypy-boto3-opsworkscm (>=1.27.0,<1.28.0)", "mypy-boto3-organizations (>=1.27.0,<1.28.0)", "mypy-boto3-osis (>=1.27.0,<1.28.0)", "mypy-boto3-outposts (>=1.27.0,<1.28.0)", "mypy-boto3-panorama (>=1.27.0,<1.28.0)", "mypy-boto3-payment-cryptography (>=1.27.0,<1.28.0)", "mypy-boto3-payment-cryptography-data (>=1.27.0,<1.28.0)", "mypy-boto3-personalize (>=1.27.0,<1.28.0)", "mypy-boto3-personalize-events (>=1.27.0,<1.28.0)", "mypy-boto3-personalize-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-pi (>=1.27.0,<1.28.0)", "mypy-boto3-pinpoint (>=1.27.0,<1.28.0)", "mypy-boto3-pinpoint-email (>=1.27.0,<1.28.0)", "mypy-boto3-pinpoint-sms-voice (>=1.27.0,<1.28.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.27.0,<1.28.0)", "mypy-boto3-pipes (>=1.27.0,<1.28.0)", "mypy-boto3-polly (>=1.27.0,<1.28.0)", "mypy-boto3-pricing (>=1.27.0,<1.28.0)", "mypy-boto3-privatenetworks (>=1.27.0,<1.28.0)", "mypy-boto3-proton (>=1.27.0,<1.28.0)", "mypy-boto3-qldb (>=1.27.0,<1.28.0)", "mypy-boto3-qldb-session (>=1.27.0,<1.28.0)", "mypy-boto3-quicksight (>=1.27.0,<1.28.0)", "mypy-boto3-ram (>=1.27.0,<1.28.0)", "mypy-boto3-rbin (>=1.27.0,<1.28.0)", "mypy-boto3-rds (>=1.27.0,<1.28.0)", "mypy-boto3-rds-data (>=1.27.0,<1.28.0)", "mypy-boto3-redshift (>=1.27.0,<1.28.0)", "mypy-boto3-redshift-data (>=1.27.0,<1.28.0)", "mypy-boto3-redshift-serverless (>=1.27.0,<1.28.0)", "mypy-boto3-rekognition (>=1.27.0,<1.28.0)", "mypy-boto3-resiliencehub (>=1.27.0,<1.28.0)", "mypy-boto3-resource-explorer-2 (>=1.27.0,<1.28.0)", "mypy-boto3-resource-groups (>=1.27.0,<1.28.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.27.0,<1.28.0)", "mypy-boto3-robomaker (>=1.27.0,<1.28.0)", "mypy-boto3-rolesanywhere (>=1.27.0,<1.28.0)", "mypy-boto3-route53 (>=1.27.0,<1.28.0)", "mypy-boto3-route53-recovery-cluster (>=1.27.0,<1.28.0)", "mypy-boto3-route53-recovery-control-config (>=1.27.0,<1.28.0)", "mypy-boto3-route53-recovery-readiness (>=1.27.0,<1.28.0)", "mypy-boto3-route53domains (>=1.27.0,<1.28.0)", "mypy-boto3-route53resolver (>=1.27.0,<1.28.0)", "mypy-boto3-rum (>=1.27.0,<1.28.0)", "mypy-boto3-s3 (>=1.27.0,<1.28.0)", "mypy-boto3-s3control (>=1.27.0,<1.28.0)", "mypy-boto3-s3outposts (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-edge (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-geospatial (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-metrics (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-savingsplans (>=1.27.0,<1.28.0)", "mypy-boto3-scheduler (>=1.27.0,<1.28.0)", "mypy-boto3-schemas (>=1.27.0,<1.28.0)", "mypy-boto3-sdb (>=1.27.0,<1.28.0)", "mypy-boto3-secretsmanager (>=1.27.0,<1.28.0)", "mypy-boto3-securityhub (>=1.27.0,<1.28.0)", "mypy-boto3-securitylake (>=1.27.0,<1.28.0)", "mypy-boto3-serverlessrepo (>=1.27.0,<1.28.0)", "mypy-boto3-service-quotas (>=1.27.0,<1.28.0)", "mypy-boto3-servicecatalog (>=1.27.0,<1.28.0)", "mypy-boto3-servicecatalog-appregistry (>=1.27.0,<1.28.0)", "mypy-boto3-servicediscovery (>=1.27.0,<1.28.0)", "mypy-boto3-ses (>=1.27.0,<1.28.0)", 
"mypy-boto3-sesv2 (>=1.27.0,<1.28.0)", "mypy-boto3-shield (>=1.27.0,<1.28.0)", "mypy-boto3-signer (>=1.27.0,<1.28.0)", "mypy-boto3-simspaceweaver (>=1.27.0,<1.28.0)", "mypy-boto3-sms (>=1.27.0,<1.28.0)", "mypy-boto3-sms-voice (>=1.27.0,<1.28.0)", "mypy-boto3-snow-device-management (>=1.27.0,<1.28.0)", "mypy-boto3-snowball (>=1.27.0,<1.28.0)", "mypy-boto3-sns (>=1.27.0,<1.28.0)", "mypy-boto3-sqs (>=1.27.0,<1.28.0)", "mypy-boto3-ssm (>=1.27.0,<1.28.0)", "mypy-boto3-ssm-contacts (>=1.27.0,<1.28.0)", "mypy-boto3-ssm-incidents (>=1.27.0,<1.28.0)", "mypy-boto3-ssm-sap (>=1.27.0,<1.28.0)", "mypy-boto3-sso (>=1.27.0,<1.28.0)", "mypy-boto3-sso-admin (>=1.27.0,<1.28.0)", "mypy-boto3-sso-oidc (>=1.27.0,<1.28.0)", "mypy-boto3-stepfunctions (>=1.27.0,<1.28.0)", "mypy-boto3-storagegateway (>=1.27.0,<1.28.0)", "mypy-boto3-sts (>=1.27.0,<1.28.0)", "mypy-boto3-support (>=1.27.0,<1.28.0)", "mypy-boto3-support-app (>=1.27.0,<1.28.0)", "mypy-boto3-swf (>=1.27.0,<1.28.0)", "mypy-boto3-synthetics (>=1.27.0,<1.28.0)", "mypy-boto3-textract (>=1.27.0,<1.28.0)", "mypy-boto3-timestream-query (>=1.27.0,<1.28.0)", "mypy-boto3-timestream-write (>=1.27.0,<1.28.0)", "mypy-boto3-tnb (>=1.27.0,<1.28.0)", "mypy-boto3-transcribe (>=1.27.0,<1.28.0)", "mypy-boto3-transfer (>=1.27.0,<1.28.0)", "mypy-boto3-translate (>=1.27.0,<1.28.0)", "mypy-boto3-verifiedpermissions (>=1.27.0,<1.28.0)", "mypy-boto3-voice-id (>=1.27.0,<1.28.0)", "mypy-boto3-vpc-lattice (>=1.27.0,<1.28.0)", "mypy-boto3-waf (>=1.27.0,<1.28.0)", "mypy-boto3-waf-regional (>=1.27.0,<1.28.0)", "mypy-boto3-wafv2 (>=1.27.0,<1.28.0)", "mypy-boto3-wellarchitected (>=1.27.0,<1.28.0)", "mypy-boto3-wisdom (>=1.27.0,<1.28.0)", "mypy-boto3-workdocs (>=1.27.0,<1.28.0)", "mypy-boto3-worklink (>=1.27.0,<1.28.0)", "mypy-boto3-workmail (>=1.27.0,<1.28.0)", "mypy-boto3-workmailmessageflow (>=1.27.0,<1.28.0)", "mypy-boto3-workspaces (>=1.27.0,<1.28.0)", "mypy-boto3-workspaces-web (>=1.27.0,<1.28.0)", "mypy-boto3-xray (>=1.27.0,<1.28.0)"] -amp = ["mypy-boto3-amp (>=1.27.0,<1.28.0)"] -amplify = ["mypy-boto3-amplify (>=1.27.0,<1.28.0)"] -amplifybackend = ["mypy-boto3-amplifybackend (>=1.27.0,<1.28.0)"] -amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.27.0,<1.28.0)"] -apigateway = ["mypy-boto3-apigateway (>=1.27.0,<1.28.0)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.27.0,<1.28.0)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.27.0,<1.28.0)"] -appconfig = ["mypy-boto3-appconfig (>=1.27.0,<1.28.0)"] -appconfigdata = ["mypy-boto3-appconfigdata (>=1.27.0,<1.28.0)"] -appfabric = ["mypy-boto3-appfabric (>=1.27.0,<1.28.0)"] -appflow = ["mypy-boto3-appflow (>=1.27.0,<1.28.0)"] -appintegrations = ["mypy-boto3-appintegrations (>=1.27.0,<1.28.0)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.27.0,<1.28.0)"] -application-insights = ["mypy-boto3-application-insights (>=1.27.0,<1.28.0)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.27.0,<1.28.0)"] -appmesh = ["mypy-boto3-appmesh (>=1.27.0,<1.28.0)"] -apprunner = ["mypy-boto3-apprunner (>=1.27.0,<1.28.0)"] -appstream = ["mypy-boto3-appstream (>=1.27.0,<1.28.0)"] -appsync = ["mypy-boto3-appsync (>=1.27.0,<1.28.0)"] -arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.27.0,<1.28.0)"] -athena = ["mypy-boto3-athena (>=1.27.0,<1.28.0)"] -auditmanager = ["mypy-boto3-auditmanager (>=1.27.0,<1.28.0)"] -autoscaling = ["mypy-boto3-autoscaling (>=1.27.0,<1.28.0)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.27.0,<1.28.0)"] -backup = ["mypy-boto3-backup 
(>=1.27.0,<1.28.0)"] -backup-gateway = ["mypy-boto3-backup-gateway (>=1.27.0,<1.28.0)"] -backupstorage = ["mypy-boto3-backupstorage (>=1.27.0,<1.28.0)"] -batch = ["mypy-boto3-batch (>=1.27.0,<1.28.0)"] -billingconductor = ["mypy-boto3-billingconductor (>=1.27.0,<1.28.0)"] -boto3 = ["boto3 (==1.27.0)", "botocore (==1.30.0)"] -braket = ["mypy-boto3-braket (>=1.27.0,<1.28.0)"] -budgets = ["mypy-boto3-budgets (>=1.27.0,<1.28.0)"] -ce = ["mypy-boto3-ce (>=1.27.0,<1.28.0)"] -chime = ["mypy-boto3-chime (>=1.27.0,<1.28.0)"] -chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.27.0,<1.28.0)"] -chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.27.0,<1.28.0)"] -chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.27.0,<1.28.0)"] -chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.27.0,<1.28.0)"] -chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.27.0,<1.28.0)"] -cleanrooms = ["mypy-boto3-cleanrooms (>=1.27.0,<1.28.0)"] -cloud9 = ["mypy-boto3-cloud9 (>=1.27.0,<1.28.0)"] -cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.27.0,<1.28.0)"] -clouddirectory = ["mypy-boto3-clouddirectory (>=1.27.0,<1.28.0)"] -cloudformation = ["mypy-boto3-cloudformation (>=1.27.0,<1.28.0)"] -cloudfront = ["mypy-boto3-cloudfront (>=1.27.0,<1.28.0)"] -cloudhsm = ["mypy-boto3-cloudhsm (>=1.27.0,<1.28.0)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.27.0,<1.28.0)"] -cloudsearch = ["mypy-boto3-cloudsearch (>=1.27.0,<1.28.0)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.27.0,<1.28.0)"] -cloudtrail = ["mypy-boto3-cloudtrail (>=1.27.0,<1.28.0)"] -cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.27.0,<1.28.0)"] -cloudwatch = ["mypy-boto3-cloudwatch (>=1.27.0,<1.28.0)"] -codeartifact = ["mypy-boto3-codeartifact (>=1.27.0,<1.28.0)"] -codebuild = ["mypy-boto3-codebuild (>=1.27.0,<1.28.0)"] -codecatalyst = ["mypy-boto3-codecatalyst (>=1.27.0,<1.28.0)"] -codecommit = ["mypy-boto3-codecommit (>=1.27.0,<1.28.0)"] -codedeploy = ["mypy-boto3-codedeploy (>=1.27.0,<1.28.0)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.27.0,<1.28.0)"] -codeguru-security = ["mypy-boto3-codeguru-security (>=1.27.0,<1.28.0)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.27.0,<1.28.0)"] -codepipeline = ["mypy-boto3-codepipeline (>=1.27.0,<1.28.0)"] -codestar = ["mypy-boto3-codestar (>=1.27.0,<1.28.0)"] -codestar-connections = ["mypy-boto3-codestar-connections (>=1.27.0,<1.28.0)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.27.0,<1.28.0)"] -cognito-identity = ["mypy-boto3-cognito-identity (>=1.27.0,<1.28.0)"] -cognito-idp = ["mypy-boto3-cognito-idp (>=1.27.0,<1.28.0)"] -cognito-sync = ["mypy-boto3-cognito-sync (>=1.27.0,<1.28.0)"] -comprehend = ["mypy-boto3-comprehend (>=1.27.0,<1.28.0)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.27.0,<1.28.0)"] -compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.27.0,<1.28.0)"] -config = ["mypy-boto3-config (>=1.27.0,<1.28.0)"] -connect = ["mypy-boto3-connect (>=1.27.0,<1.28.0)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.27.0,<1.28.0)"] -connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.27.0,<1.28.0)"] -connectcases = ["mypy-boto3-connectcases (>=1.27.0,<1.28.0)"] -connectparticipant = ["mypy-boto3-connectparticipant (>=1.27.0,<1.28.0)"] -controltower = ["mypy-boto3-controltower (>=1.27.0,<1.28.0)"] -cur = ["mypy-boto3-cur (>=1.27.0,<1.28.0)"] -customer-profiles = ["mypy-boto3-customer-profiles (>=1.27.0,<1.28.0)"] -databrew = ["mypy-boto3-databrew (>=1.27.0,<1.28.0)"] -dataexchange = 
["mypy-boto3-dataexchange (>=1.27.0,<1.28.0)"] -datapipeline = ["mypy-boto3-datapipeline (>=1.27.0,<1.28.0)"] -datasync = ["mypy-boto3-datasync (>=1.27.0,<1.28.0)"] -dax = ["mypy-boto3-dax (>=1.27.0,<1.28.0)"] -detective = ["mypy-boto3-detective (>=1.27.0,<1.28.0)"] -devicefarm = ["mypy-boto3-devicefarm (>=1.27.0,<1.28.0)"] -devops-guru = ["mypy-boto3-devops-guru (>=1.27.0,<1.28.0)"] -directconnect = ["mypy-boto3-directconnect (>=1.27.0,<1.28.0)"] -discovery = ["mypy-boto3-discovery (>=1.27.0,<1.28.0)"] -dlm = ["mypy-boto3-dlm (>=1.27.0,<1.28.0)"] -dms = ["mypy-boto3-dms (>=1.27.0,<1.28.0)"] -docdb = ["mypy-boto3-docdb (>=1.27.0,<1.28.0)"] -docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.27.0,<1.28.0)"] -drs = ["mypy-boto3-drs (>=1.27.0,<1.28.0)"] -ds = ["mypy-boto3-ds (>=1.27.0,<1.28.0)"] -dynamodb = ["mypy-boto3-dynamodb (>=1.27.0,<1.28.0)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.27.0,<1.28.0)"] -ebs = ["mypy-boto3-ebs (>=1.27.0,<1.28.0)"] -ec2 = ["mypy-boto3-ec2 (>=1.27.0,<1.28.0)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.27.0,<1.28.0)"] -ecr = ["mypy-boto3-ecr (>=1.27.0,<1.28.0)"] -ecr-public = ["mypy-boto3-ecr-public (>=1.27.0,<1.28.0)"] -ecs = ["mypy-boto3-ecs (>=1.27.0,<1.28.0)"] -efs = ["mypy-boto3-efs (>=1.27.0,<1.28.0)"] -eks = ["mypy-boto3-eks (>=1.27.0,<1.28.0)"] -elastic-inference = ["mypy-boto3-elastic-inference (>=1.27.0,<1.28.0)"] -elasticache = ["mypy-boto3-elasticache (>=1.27.0,<1.28.0)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.27.0,<1.28.0)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.27.0,<1.28.0)"] -elb = ["mypy-boto3-elb (>=1.27.0,<1.28.0)"] -elbv2 = ["mypy-boto3-elbv2 (>=1.27.0,<1.28.0)"] -emr = ["mypy-boto3-emr (>=1.27.0,<1.28.0)"] -emr-containers = ["mypy-boto3-emr-containers (>=1.27.0,<1.28.0)"] -emr-serverless = ["mypy-boto3-emr-serverless (>=1.27.0,<1.28.0)"] -es = ["mypy-boto3-es (>=1.27.0,<1.28.0)"] -essential = ["mypy-boto3-cloudformation (>=1.27.0,<1.28.0)", "mypy-boto3-dynamodb (>=1.27.0,<1.28.0)", "mypy-boto3-ec2 (>=1.27.0,<1.28.0)", "mypy-boto3-lambda (>=1.27.0,<1.28.0)", "mypy-boto3-rds (>=1.27.0,<1.28.0)", "mypy-boto3-s3 (>=1.27.0,<1.28.0)", "mypy-boto3-sqs (>=1.27.0,<1.28.0)"] -events = ["mypy-boto3-events (>=1.27.0,<1.28.0)"] -evidently = ["mypy-boto3-evidently (>=1.27.0,<1.28.0)"] -finspace = ["mypy-boto3-finspace (>=1.27.0,<1.28.0)"] -finspace-data = ["mypy-boto3-finspace-data (>=1.27.0,<1.28.0)"] -firehose = ["mypy-boto3-firehose (>=1.27.0,<1.28.0)"] -fis = ["mypy-boto3-fis (>=1.27.0,<1.28.0)"] -fms = ["mypy-boto3-fms (>=1.27.0,<1.28.0)"] -forecast = ["mypy-boto3-forecast (>=1.27.0,<1.28.0)"] -forecastquery = ["mypy-boto3-forecastquery (>=1.27.0,<1.28.0)"] -frauddetector = ["mypy-boto3-frauddetector (>=1.27.0,<1.28.0)"] -fsx = ["mypy-boto3-fsx (>=1.27.0,<1.28.0)"] -gamelift = ["mypy-boto3-gamelift (>=1.27.0,<1.28.0)"] -gamesparks = ["mypy-boto3-gamesparks (>=1.27.0,<1.28.0)"] -glacier = ["mypy-boto3-glacier (>=1.27.0,<1.28.0)"] -globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.27.0,<1.28.0)"] -glue = ["mypy-boto3-glue (>=1.27.0,<1.28.0)"] -grafana = ["mypy-boto3-grafana (>=1.27.0,<1.28.0)"] -greengrass = ["mypy-boto3-greengrass (>=1.27.0,<1.28.0)"] -greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.27.0,<1.28.0)"] -groundstation = ["mypy-boto3-groundstation (>=1.27.0,<1.28.0)"] -guardduty = ["mypy-boto3-guardduty (>=1.27.0,<1.28.0)"] -health = ["mypy-boto3-health (>=1.27.0,<1.28.0)"] -healthlake = ["mypy-boto3-healthlake (>=1.27.0,<1.28.0)"] -honeycode = ["mypy-boto3-honeycode 
(>=1.27.0,<1.28.0)"] -iam = ["mypy-boto3-iam (>=1.27.0,<1.28.0)"] -identitystore = ["mypy-boto3-identitystore (>=1.27.0,<1.28.0)"] -imagebuilder = ["mypy-boto3-imagebuilder (>=1.27.0,<1.28.0)"] -importexport = ["mypy-boto3-importexport (>=1.27.0,<1.28.0)"] -inspector = ["mypy-boto3-inspector (>=1.27.0,<1.28.0)"] -inspector2 = ["mypy-boto3-inspector2 (>=1.27.0,<1.28.0)"] -internetmonitor = ["mypy-boto3-internetmonitor (>=1.27.0,<1.28.0)"] -iot = ["mypy-boto3-iot (>=1.27.0,<1.28.0)"] -iot-data = ["mypy-boto3-iot-data (>=1.27.0,<1.28.0)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.27.0,<1.28.0)"] -iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.27.0,<1.28.0)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.27.0,<1.28.0)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.27.0,<1.28.0)"] -iotanalytics = ["mypy-boto3-iotanalytics (>=1.27.0,<1.28.0)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.27.0,<1.28.0)"] -iotevents = ["mypy-boto3-iotevents (>=1.27.0,<1.28.0)"] -iotevents-data = ["mypy-boto3-iotevents-data (>=1.27.0,<1.28.0)"] -iotfleethub = ["mypy-boto3-iotfleethub (>=1.27.0,<1.28.0)"] -iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.27.0,<1.28.0)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.27.0,<1.28.0)"] -iotsitewise = ["mypy-boto3-iotsitewise (>=1.27.0,<1.28.0)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.27.0,<1.28.0)"] -iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.27.0,<1.28.0)"] -iotwireless = ["mypy-boto3-iotwireless (>=1.27.0,<1.28.0)"] -ivs = ["mypy-boto3-ivs (>=1.27.0,<1.28.0)"] -ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.27.0,<1.28.0)"] -ivschat = ["mypy-boto3-ivschat (>=1.27.0,<1.28.0)"] -kafka = ["mypy-boto3-kafka (>=1.27.0,<1.28.0)"] -kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.27.0,<1.28.0)"] -kendra = ["mypy-boto3-kendra (>=1.27.0,<1.28.0)"] -kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.27.0,<1.28.0)"] -keyspaces = ["mypy-boto3-keyspaces (>=1.27.0,<1.28.0)"] -kinesis = ["mypy-boto3-kinesis (>=1.27.0,<1.28.0)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.27.0,<1.28.0)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.27.0,<1.28.0)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.27.0,<1.28.0)"] -kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.27.0,<1.28.0)"] -kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.27.0,<1.28.0)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.27.0,<1.28.0)"] -kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.27.0,<1.28.0)"] -kms = ["mypy-boto3-kms (>=1.27.0,<1.28.0)"] -lakeformation = ["mypy-boto3-lakeformation (>=1.27.0,<1.28.0)"] -lambda = ["mypy-boto3-lambda (>=1.27.0,<1.28.0)"] -lex-models = ["mypy-boto3-lex-models (>=1.27.0,<1.28.0)"] -lex-runtime = ["mypy-boto3-lex-runtime (>=1.27.0,<1.28.0)"] -lexv2-models = ["mypy-boto3-lexv2-models (>=1.27.0,<1.28.0)"] -lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.27.0,<1.28.0)"] -license-manager = ["mypy-boto3-license-manager (>=1.27.0,<1.28.0)"] -license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.27.0,<1.28.0)"] -license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.27.0,<1.28.0)"] -lightsail = ["mypy-boto3-lightsail (>=1.27.0,<1.28.0)"] -location = ["mypy-boto3-location (>=1.27.0,<1.28.0)"] -logs = ["mypy-boto3-logs (>=1.27.0,<1.28.0)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.27.0,<1.28.0)"] -lookoutmetrics = 
["mypy-boto3-lookoutmetrics (>=1.27.0,<1.28.0)"] -lookoutvision = ["mypy-boto3-lookoutvision (>=1.27.0,<1.28.0)"] -m2 = ["mypy-boto3-m2 (>=1.27.0,<1.28.0)"] -machinelearning = ["mypy-boto3-machinelearning (>=1.27.0,<1.28.0)"] -macie = ["mypy-boto3-macie (>=1.27.0,<1.28.0)"] -macie2 = ["mypy-boto3-macie2 (>=1.27.0,<1.28.0)"] -managedblockchain = ["mypy-boto3-managedblockchain (>=1.27.0,<1.28.0)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.27.0,<1.28.0)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.27.0,<1.28.0)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.27.0,<1.28.0)"] -mediaconnect = ["mypy-boto3-mediaconnect (>=1.27.0,<1.28.0)"] -mediaconvert = ["mypy-boto3-mediaconvert (>=1.27.0,<1.28.0)"] -medialive = ["mypy-boto3-medialive (>=1.27.0,<1.28.0)"] -mediapackage = ["mypy-boto3-mediapackage (>=1.27.0,<1.28.0)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.27.0,<1.28.0)"] -mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.27.0,<1.28.0)"] -mediastore = ["mypy-boto3-mediastore (>=1.27.0,<1.28.0)"] -mediastore-data = ["mypy-boto3-mediastore-data (>=1.27.0,<1.28.0)"] -mediatailor = ["mypy-boto3-mediatailor (>=1.27.0,<1.28.0)"] -memorydb = ["mypy-boto3-memorydb (>=1.27.0,<1.28.0)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.27.0,<1.28.0)"] -mgh = ["mypy-boto3-mgh (>=1.27.0,<1.28.0)"] -mgn = ["mypy-boto3-mgn (>=1.27.0,<1.28.0)"] -migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.27.0,<1.28.0)"] -migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.27.0,<1.28.0)"] -migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.27.0,<1.28.0)"] -migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.27.0,<1.28.0)"] -mobile = ["mypy-boto3-mobile (>=1.27.0,<1.28.0)"] -mq = ["mypy-boto3-mq (>=1.27.0,<1.28.0)"] -mturk = ["mypy-boto3-mturk (>=1.27.0,<1.28.0)"] -mwaa = ["mypy-boto3-mwaa (>=1.27.0,<1.28.0)"] -neptune = ["mypy-boto3-neptune (>=1.27.0,<1.28.0)"] -network-firewall = ["mypy-boto3-network-firewall (>=1.27.0,<1.28.0)"] -networkmanager = ["mypy-boto3-networkmanager (>=1.27.0,<1.28.0)"] -nimble = ["mypy-boto3-nimble (>=1.27.0,<1.28.0)"] -oam = ["mypy-boto3-oam (>=1.27.0,<1.28.0)"] -omics = ["mypy-boto3-omics (>=1.27.0,<1.28.0)"] -opensearch = ["mypy-boto3-opensearch (>=1.27.0,<1.28.0)"] -opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.27.0,<1.28.0)"] -opsworks = ["mypy-boto3-opsworks (>=1.27.0,<1.28.0)"] -opsworkscm = ["mypy-boto3-opsworkscm (>=1.27.0,<1.28.0)"] -organizations = ["mypy-boto3-organizations (>=1.27.0,<1.28.0)"] -osis = ["mypy-boto3-osis (>=1.27.0,<1.28.0)"] -outposts = ["mypy-boto3-outposts (>=1.27.0,<1.28.0)"] -panorama = ["mypy-boto3-panorama (>=1.27.0,<1.28.0)"] -payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.27.0,<1.28.0)"] -payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.27.0,<1.28.0)"] -personalize = ["mypy-boto3-personalize (>=1.27.0,<1.28.0)"] -personalize-events = ["mypy-boto3-personalize-events (>=1.27.0,<1.28.0)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.27.0,<1.28.0)"] -pi = ["mypy-boto3-pi (>=1.27.0,<1.28.0)"] -pinpoint = ["mypy-boto3-pinpoint (>=1.27.0,<1.28.0)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.27.0,<1.28.0)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.27.0,<1.28.0)"] -pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.27.0,<1.28.0)"] -pipes = ["mypy-boto3-pipes (>=1.27.0,<1.28.0)"] -polly = 
["mypy-boto3-polly (>=1.27.0,<1.28.0)"] -pricing = ["mypy-boto3-pricing (>=1.27.0,<1.28.0)"] -privatenetworks = ["mypy-boto3-privatenetworks (>=1.27.0,<1.28.0)"] -proton = ["mypy-boto3-proton (>=1.27.0,<1.28.0)"] -qldb = ["mypy-boto3-qldb (>=1.27.0,<1.28.0)"] -qldb-session = ["mypy-boto3-qldb-session (>=1.27.0,<1.28.0)"] -quicksight = ["mypy-boto3-quicksight (>=1.27.0,<1.28.0)"] -ram = ["mypy-boto3-ram (>=1.27.0,<1.28.0)"] -rbin = ["mypy-boto3-rbin (>=1.27.0,<1.28.0)"] -rds = ["mypy-boto3-rds (>=1.27.0,<1.28.0)"] -rds-data = ["mypy-boto3-rds-data (>=1.27.0,<1.28.0)"] -redshift = ["mypy-boto3-redshift (>=1.27.0,<1.28.0)"] -redshift-data = ["mypy-boto3-redshift-data (>=1.27.0,<1.28.0)"] -redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.27.0,<1.28.0)"] -rekognition = ["mypy-boto3-rekognition (>=1.27.0,<1.28.0)"] -resiliencehub = ["mypy-boto3-resiliencehub (>=1.27.0,<1.28.0)"] -resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.27.0,<1.28.0)"] -resource-groups = ["mypy-boto3-resource-groups (>=1.27.0,<1.28.0)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.27.0,<1.28.0)"] -robomaker = ["mypy-boto3-robomaker (>=1.27.0,<1.28.0)"] -rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.27.0,<1.28.0)"] -route53 = ["mypy-boto3-route53 (>=1.27.0,<1.28.0)"] -route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.27.0,<1.28.0)"] -route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.27.0,<1.28.0)"] -route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.27.0,<1.28.0)"] -route53domains = ["mypy-boto3-route53domains (>=1.27.0,<1.28.0)"] -route53resolver = ["mypy-boto3-route53resolver (>=1.27.0,<1.28.0)"] -rum = ["mypy-boto3-rum (>=1.27.0,<1.28.0)"] -s3 = ["mypy-boto3-s3 (>=1.27.0,<1.28.0)"] -s3control = ["mypy-boto3-s3control (>=1.27.0,<1.28.0)"] -s3outposts = ["mypy-boto3-s3outposts (>=1.27.0,<1.28.0)"] -sagemaker = ["mypy-boto3-sagemaker (>=1.27.0,<1.28.0)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.27.0,<1.28.0)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.27.0,<1.28.0)"] -sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.27.0,<1.28.0)"] -sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.27.0,<1.28.0)"] -sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.27.0,<1.28.0)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.27.0,<1.28.0)"] -savingsplans = ["mypy-boto3-savingsplans (>=1.27.0,<1.28.0)"] -scheduler = ["mypy-boto3-scheduler (>=1.27.0,<1.28.0)"] -schemas = ["mypy-boto3-schemas (>=1.27.0,<1.28.0)"] -sdb = ["mypy-boto3-sdb (>=1.27.0,<1.28.0)"] -secretsmanager = ["mypy-boto3-secretsmanager (>=1.27.0,<1.28.0)"] -securityhub = ["mypy-boto3-securityhub (>=1.27.0,<1.28.0)"] -securitylake = ["mypy-boto3-securitylake (>=1.27.0,<1.28.0)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.27.0,<1.28.0)"] -service-quotas = ["mypy-boto3-service-quotas (>=1.27.0,<1.28.0)"] -servicecatalog = ["mypy-boto3-servicecatalog (>=1.27.0,<1.28.0)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.27.0,<1.28.0)"] -servicediscovery = ["mypy-boto3-servicediscovery (>=1.27.0,<1.28.0)"] -ses = ["mypy-boto3-ses (>=1.27.0,<1.28.0)"] -sesv2 = ["mypy-boto3-sesv2 (>=1.27.0,<1.28.0)"] -shield = ["mypy-boto3-shield (>=1.27.0,<1.28.0)"] -signer = ["mypy-boto3-signer (>=1.27.0,<1.28.0)"] -simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.27.0,<1.28.0)"] -sms = ["mypy-boto3-sms (>=1.27.0,<1.28.0)"] -sms-voice = 
["mypy-boto3-sms-voice (>=1.27.0,<1.28.0)"] -snow-device-management = ["mypy-boto3-snow-device-management (>=1.27.0,<1.28.0)"] -snowball = ["mypy-boto3-snowball (>=1.27.0,<1.28.0)"] -sns = ["mypy-boto3-sns (>=1.27.0,<1.28.0)"] -sqs = ["mypy-boto3-sqs (>=1.27.0,<1.28.0)"] -ssm = ["mypy-boto3-ssm (>=1.27.0,<1.28.0)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.27.0,<1.28.0)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.27.0,<1.28.0)"] -ssm-sap = ["mypy-boto3-ssm-sap (>=1.27.0,<1.28.0)"] -sso = ["mypy-boto3-sso (>=1.27.0,<1.28.0)"] -sso-admin = ["mypy-boto3-sso-admin (>=1.27.0,<1.28.0)"] -sso-oidc = ["mypy-boto3-sso-oidc (>=1.27.0,<1.28.0)"] -stepfunctions = ["mypy-boto3-stepfunctions (>=1.27.0,<1.28.0)"] -storagegateway = ["mypy-boto3-storagegateway (>=1.27.0,<1.28.0)"] -sts = ["mypy-boto3-sts (>=1.27.0,<1.28.0)"] -support = ["mypy-boto3-support (>=1.27.0,<1.28.0)"] -support-app = ["mypy-boto3-support-app (>=1.27.0,<1.28.0)"] -swf = ["mypy-boto3-swf (>=1.27.0,<1.28.0)"] -synthetics = ["mypy-boto3-synthetics (>=1.27.0,<1.28.0)"] -textract = ["mypy-boto3-textract (>=1.27.0,<1.28.0)"] -timestream-query = ["mypy-boto3-timestream-query (>=1.27.0,<1.28.0)"] -timestream-write = ["mypy-boto3-timestream-write (>=1.27.0,<1.28.0)"] -tnb = ["mypy-boto3-tnb (>=1.27.0,<1.28.0)"] -transcribe = ["mypy-boto3-transcribe (>=1.27.0,<1.28.0)"] -transfer = ["mypy-boto3-transfer (>=1.27.0,<1.28.0)"] -translate = ["mypy-boto3-translate (>=1.27.0,<1.28.0)"] -verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.27.0,<1.28.0)"] -voice-id = ["mypy-boto3-voice-id (>=1.27.0,<1.28.0)"] -vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.27.0,<1.28.0)"] -waf = ["mypy-boto3-waf (>=1.27.0,<1.28.0)"] -waf-regional = ["mypy-boto3-waf-regional (>=1.27.0,<1.28.0)"] -wafv2 = ["mypy-boto3-wafv2 (>=1.27.0,<1.28.0)"] -wellarchitected = ["mypy-boto3-wellarchitected (>=1.27.0,<1.28.0)"] -wisdom = ["mypy-boto3-wisdom (>=1.27.0,<1.28.0)"] -workdocs = ["mypy-boto3-workdocs (>=1.27.0,<1.28.0)"] -worklink = ["mypy-boto3-worklink (>=1.27.0,<1.28.0)"] -workmail = ["mypy-boto3-workmail (>=1.27.0,<1.28.0)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.27.0,<1.28.0)"] -workspaces = ["mypy-boto3-workspaces (>=1.27.0,<1.28.0)"] -workspaces-web = ["mypy-boto3-workspaces-web (>=1.27.0,<1.28.0)"] -xray = ["mypy-boto3-xray (>=1.27.0,<1.28.0)"] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.28.0,<1.29.0)"] +account = ["mypy-boto3-account (>=1.28.0,<1.29.0)"] +acm = ["mypy-boto3-acm (>=1.28.0,<1.29.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.28.0,<1.29.0)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.28.0,<1.29.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.28.0,<1.29.0)", "mypy-boto3-account (>=1.28.0,<1.29.0)", "mypy-boto3-acm (>=1.28.0,<1.29.0)", "mypy-boto3-acm-pca (>=1.28.0,<1.29.0)", "mypy-boto3-alexaforbusiness (>=1.28.0,<1.29.0)", "mypy-boto3-amp (>=1.28.0,<1.29.0)", "mypy-boto3-amplify (>=1.28.0,<1.29.0)", "mypy-boto3-amplifybackend (>=1.28.0,<1.29.0)", "mypy-boto3-amplifyuibuilder (>=1.28.0,<1.29.0)", "mypy-boto3-apigateway (>=1.28.0,<1.29.0)", "mypy-boto3-apigatewaymanagementapi (>=1.28.0,<1.29.0)", "mypy-boto3-apigatewayv2 (>=1.28.0,<1.29.0)", "mypy-boto3-appconfig (>=1.28.0,<1.29.0)", "mypy-boto3-appconfigdata (>=1.28.0,<1.29.0)", "mypy-boto3-appfabric (>=1.28.0,<1.29.0)", "mypy-boto3-appflow (>=1.28.0,<1.29.0)", "mypy-boto3-appintegrations (>=1.28.0,<1.29.0)", "mypy-boto3-application-autoscaling (>=1.28.0,<1.29.0)", "mypy-boto3-application-insights (>=1.28.0,<1.29.0)", "mypy-boto3-applicationcostprofiler 
(>=1.28.0,<1.29.0)", "mypy-boto3-appmesh (>=1.28.0,<1.29.0)", "mypy-boto3-apprunner (>=1.28.0,<1.29.0)", "mypy-boto3-appstream (>=1.28.0,<1.29.0)", "mypy-boto3-appsync (>=1.28.0,<1.29.0)", "mypy-boto3-arc-zonal-shift (>=1.28.0,<1.29.0)", "mypy-boto3-athena (>=1.28.0,<1.29.0)", "mypy-boto3-auditmanager (>=1.28.0,<1.29.0)", "mypy-boto3-autoscaling (>=1.28.0,<1.29.0)", "mypy-boto3-autoscaling-plans (>=1.28.0,<1.29.0)", "mypy-boto3-backup (>=1.28.0,<1.29.0)", "mypy-boto3-backup-gateway (>=1.28.0,<1.29.0)", "mypy-boto3-backupstorage (>=1.28.0,<1.29.0)", "mypy-boto3-batch (>=1.28.0,<1.29.0)", "mypy-boto3-billingconductor (>=1.28.0,<1.29.0)", "mypy-boto3-braket (>=1.28.0,<1.29.0)", "mypy-boto3-budgets (>=1.28.0,<1.29.0)", "mypy-boto3-ce (>=1.28.0,<1.29.0)", "mypy-boto3-chime (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-identity (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-meetings (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-messaging (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-voice (>=1.28.0,<1.29.0)", "mypy-boto3-cleanrooms (>=1.28.0,<1.29.0)", "mypy-boto3-cloud9 (>=1.28.0,<1.29.0)", "mypy-boto3-cloudcontrol (>=1.28.0,<1.29.0)", "mypy-boto3-clouddirectory (>=1.28.0,<1.29.0)", "mypy-boto3-cloudformation (>=1.28.0,<1.29.0)", "mypy-boto3-cloudfront (>=1.28.0,<1.29.0)", "mypy-boto3-cloudhsm (>=1.28.0,<1.29.0)", "mypy-boto3-cloudhsmv2 (>=1.28.0,<1.29.0)", "mypy-boto3-cloudsearch (>=1.28.0,<1.29.0)", "mypy-boto3-cloudsearchdomain (>=1.28.0,<1.29.0)", "mypy-boto3-cloudtrail (>=1.28.0,<1.29.0)", "mypy-boto3-cloudtrail-data (>=1.28.0,<1.29.0)", "mypy-boto3-cloudwatch (>=1.28.0,<1.29.0)", "mypy-boto3-codeartifact (>=1.28.0,<1.29.0)", "mypy-boto3-codebuild (>=1.28.0,<1.29.0)", "mypy-boto3-codecatalyst (>=1.28.0,<1.29.0)", "mypy-boto3-codecommit (>=1.28.0,<1.29.0)", "mypy-boto3-codedeploy (>=1.28.0,<1.29.0)", "mypy-boto3-codeguru-reviewer (>=1.28.0,<1.29.0)", "mypy-boto3-codeguru-security (>=1.28.0,<1.29.0)", "mypy-boto3-codeguruprofiler (>=1.28.0,<1.29.0)", "mypy-boto3-codepipeline (>=1.28.0,<1.29.0)", "mypy-boto3-codestar (>=1.28.0,<1.29.0)", "mypy-boto3-codestar-connections (>=1.28.0,<1.29.0)", "mypy-boto3-codestar-notifications (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-identity (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-idp (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-sync (>=1.28.0,<1.29.0)", "mypy-boto3-comprehend (>=1.28.0,<1.29.0)", "mypy-boto3-comprehendmedical (>=1.28.0,<1.29.0)", "mypy-boto3-compute-optimizer (>=1.28.0,<1.29.0)", "mypy-boto3-config (>=1.28.0,<1.29.0)", "mypy-boto3-connect (>=1.28.0,<1.29.0)", "mypy-boto3-connect-contact-lens (>=1.28.0,<1.29.0)", "mypy-boto3-connectcampaigns (>=1.28.0,<1.29.0)", "mypy-boto3-connectcases (>=1.28.0,<1.29.0)", "mypy-boto3-connectparticipant (>=1.28.0,<1.29.0)", "mypy-boto3-controltower (>=1.28.0,<1.29.0)", "mypy-boto3-cur (>=1.28.0,<1.29.0)", "mypy-boto3-customer-profiles (>=1.28.0,<1.29.0)", "mypy-boto3-databrew (>=1.28.0,<1.29.0)", "mypy-boto3-dataexchange (>=1.28.0,<1.29.0)", "mypy-boto3-datapipeline (>=1.28.0,<1.29.0)", "mypy-boto3-datasync (>=1.28.0,<1.29.0)", "mypy-boto3-dax (>=1.28.0,<1.29.0)", "mypy-boto3-detective (>=1.28.0,<1.29.0)", "mypy-boto3-devicefarm (>=1.28.0,<1.29.0)", "mypy-boto3-devops-guru (>=1.28.0,<1.29.0)", "mypy-boto3-directconnect (>=1.28.0,<1.29.0)", "mypy-boto3-discovery (>=1.28.0,<1.29.0)", "mypy-boto3-dlm (>=1.28.0,<1.29.0)", "mypy-boto3-dms (>=1.28.0,<1.29.0)", "mypy-boto3-docdb (>=1.28.0,<1.29.0)", "mypy-boto3-docdb-elastic (>=1.28.0,<1.29.0)", "mypy-boto3-drs 
(>=1.28.0,<1.29.0)", "mypy-boto3-ds (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodb (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodbstreams (>=1.28.0,<1.29.0)", "mypy-boto3-ebs (>=1.28.0,<1.29.0)", "mypy-boto3-ec2 (>=1.28.0,<1.29.0)", "mypy-boto3-ec2-instance-connect (>=1.28.0,<1.29.0)", "mypy-boto3-ecr (>=1.28.0,<1.29.0)", "mypy-boto3-ecr-public (>=1.28.0,<1.29.0)", "mypy-boto3-ecs (>=1.28.0,<1.29.0)", "mypy-boto3-efs (>=1.28.0,<1.29.0)", "mypy-boto3-eks (>=1.28.0,<1.29.0)", "mypy-boto3-elastic-inference (>=1.28.0,<1.29.0)", "mypy-boto3-elasticache (>=1.28.0,<1.29.0)", "mypy-boto3-elasticbeanstalk (>=1.28.0,<1.29.0)", "mypy-boto3-elastictranscoder (>=1.28.0,<1.29.0)", "mypy-boto3-elb (>=1.28.0,<1.29.0)", "mypy-boto3-elbv2 (>=1.28.0,<1.29.0)", "mypy-boto3-emr (>=1.28.0,<1.29.0)", "mypy-boto3-emr-containers (>=1.28.0,<1.29.0)", "mypy-boto3-emr-serverless (>=1.28.0,<1.29.0)", "mypy-boto3-entityresolution (>=1.28.0,<1.29.0)", "mypy-boto3-es (>=1.28.0,<1.29.0)", "mypy-boto3-events (>=1.28.0,<1.29.0)", "mypy-boto3-evidently (>=1.28.0,<1.29.0)", "mypy-boto3-finspace (>=1.28.0,<1.29.0)", "mypy-boto3-finspace-data (>=1.28.0,<1.29.0)", "mypy-boto3-firehose (>=1.28.0,<1.29.0)", "mypy-boto3-fis (>=1.28.0,<1.29.0)", "mypy-boto3-fms (>=1.28.0,<1.29.0)", "mypy-boto3-forecast (>=1.28.0,<1.29.0)", "mypy-boto3-forecastquery (>=1.28.0,<1.29.0)", "mypy-boto3-frauddetector (>=1.28.0,<1.29.0)", "mypy-boto3-fsx (>=1.28.0,<1.29.0)", "mypy-boto3-gamelift (>=1.28.0,<1.29.0)", "mypy-boto3-gamesparks (>=1.28.0,<1.29.0)", "mypy-boto3-glacier (>=1.28.0,<1.29.0)", "mypy-boto3-globalaccelerator (>=1.28.0,<1.29.0)", "mypy-boto3-glue (>=1.28.0,<1.29.0)", "mypy-boto3-grafana (>=1.28.0,<1.29.0)", "mypy-boto3-greengrass (>=1.28.0,<1.29.0)", "mypy-boto3-greengrassv2 (>=1.28.0,<1.29.0)", "mypy-boto3-groundstation (>=1.28.0,<1.29.0)", "mypy-boto3-guardduty (>=1.28.0,<1.29.0)", "mypy-boto3-health (>=1.28.0,<1.29.0)", "mypy-boto3-healthlake (>=1.28.0,<1.29.0)", "mypy-boto3-honeycode (>=1.28.0,<1.29.0)", "mypy-boto3-iam (>=1.28.0,<1.29.0)", "mypy-boto3-identitystore (>=1.28.0,<1.29.0)", "mypy-boto3-imagebuilder (>=1.28.0,<1.29.0)", "mypy-boto3-importexport (>=1.28.0,<1.29.0)", "mypy-boto3-inspector (>=1.28.0,<1.29.0)", "mypy-boto3-inspector2 (>=1.28.0,<1.29.0)", "mypy-boto3-internetmonitor (>=1.28.0,<1.29.0)", "mypy-boto3-iot (>=1.28.0,<1.29.0)", "mypy-boto3-iot-data (>=1.28.0,<1.29.0)", "mypy-boto3-iot-jobs-data (>=1.28.0,<1.29.0)", "mypy-boto3-iot-roborunner (>=1.28.0,<1.29.0)", "mypy-boto3-iot1click-devices (>=1.28.0,<1.29.0)", "mypy-boto3-iot1click-projects (>=1.28.0,<1.29.0)", "mypy-boto3-iotanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-iotdeviceadvisor (>=1.28.0,<1.29.0)", "mypy-boto3-iotevents (>=1.28.0,<1.29.0)", "mypy-boto3-iotevents-data (>=1.28.0,<1.29.0)", "mypy-boto3-iotfleethub (>=1.28.0,<1.29.0)", "mypy-boto3-iotfleetwise (>=1.28.0,<1.29.0)", "mypy-boto3-iotsecuretunneling (>=1.28.0,<1.29.0)", "mypy-boto3-iotsitewise (>=1.28.0,<1.29.0)", "mypy-boto3-iotthingsgraph (>=1.28.0,<1.29.0)", "mypy-boto3-iottwinmaker (>=1.28.0,<1.29.0)", "mypy-boto3-iotwireless (>=1.28.0,<1.29.0)", "mypy-boto3-ivs (>=1.28.0,<1.29.0)", "mypy-boto3-ivs-realtime (>=1.28.0,<1.29.0)", "mypy-boto3-ivschat (>=1.28.0,<1.29.0)", "mypy-boto3-kafka (>=1.28.0,<1.29.0)", "mypy-boto3-kafkaconnect (>=1.28.0,<1.29.0)", "mypy-boto3-kendra (>=1.28.0,<1.29.0)", "mypy-boto3-kendra-ranking (>=1.28.0,<1.29.0)", "mypy-boto3-keyspaces (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-archived-media (>=1.28.0,<1.29.0)", 
"mypy-boto3-kinesis-video-media (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-signaling (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisvideo (>=1.28.0,<1.29.0)", "mypy-boto3-kms (>=1.28.0,<1.29.0)", "mypy-boto3-lakeformation (>=1.28.0,<1.29.0)", "mypy-boto3-lambda (>=1.28.0,<1.29.0)", "mypy-boto3-lex-models (>=1.28.0,<1.29.0)", "mypy-boto3-lex-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-lexv2-models (>=1.28.0,<1.29.0)", "mypy-boto3-lexv2-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.28.0,<1.29.0)", "mypy-boto3-lightsail (>=1.28.0,<1.29.0)", "mypy-boto3-location (>=1.28.0,<1.29.0)", "mypy-boto3-logs (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutequipment (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutmetrics (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutvision (>=1.28.0,<1.29.0)", "mypy-boto3-m2 (>=1.28.0,<1.29.0)", "mypy-boto3-machinelearning (>=1.28.0,<1.29.0)", "mypy-boto3-macie (>=1.28.0,<1.29.0)", "mypy-boto3-macie2 (>=1.28.0,<1.29.0)", "mypy-boto3-managedblockchain (>=1.28.0,<1.29.0)", "mypy-boto3-managedblockchain-query (>=1.28.0,<1.29.0)", "mypy-boto3-marketplace-catalog (>=1.28.0,<1.29.0)", "mypy-boto3-marketplace-entitlement (>=1.28.0,<1.29.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-mediaconnect (>=1.28.0,<1.29.0)", "mypy-boto3-mediaconvert (>=1.28.0,<1.29.0)", "mypy-boto3-medialive (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackage (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackage-vod (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackagev2 (>=1.28.0,<1.29.0)", "mypy-boto3-mediastore (>=1.28.0,<1.29.0)", "mypy-boto3-mediastore-data (>=1.28.0,<1.29.0)", "mypy-boto3-mediatailor (>=1.28.0,<1.29.0)", "mypy-boto3-medical-imaging (>=1.28.0,<1.29.0)", "mypy-boto3-memorydb (>=1.28.0,<1.29.0)", "mypy-boto3-meteringmarketplace (>=1.28.0,<1.29.0)", "mypy-boto3-mgh (>=1.28.0,<1.29.0)", "mypy-boto3-mgn (>=1.28.0,<1.29.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhub-config (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhuborchestrator (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhubstrategy (>=1.28.0,<1.29.0)", "mypy-boto3-mobile (>=1.28.0,<1.29.0)", "mypy-boto3-mq (>=1.28.0,<1.29.0)", "mypy-boto3-mturk (>=1.28.0,<1.29.0)", "mypy-boto3-mwaa (>=1.28.0,<1.29.0)", "mypy-boto3-neptune (>=1.28.0,<1.29.0)", "mypy-boto3-neptunedata (>=1.28.0,<1.29.0)", "mypy-boto3-network-firewall (>=1.28.0,<1.29.0)", "mypy-boto3-networkmanager (>=1.28.0,<1.29.0)", "mypy-boto3-nimble (>=1.28.0,<1.29.0)", "mypy-boto3-oam (>=1.28.0,<1.29.0)", "mypy-boto3-omics (>=1.28.0,<1.29.0)", "mypy-boto3-opensearch (>=1.28.0,<1.29.0)", "mypy-boto3-opensearchserverless (>=1.28.0,<1.29.0)", "mypy-boto3-opsworks (>=1.28.0,<1.29.0)", "mypy-boto3-opsworkscm (>=1.28.0,<1.29.0)", "mypy-boto3-organizations (>=1.28.0,<1.29.0)", "mypy-boto3-osis (>=1.28.0,<1.29.0)", "mypy-boto3-outposts (>=1.28.0,<1.29.0)", "mypy-boto3-panorama (>=1.28.0,<1.29.0)", "mypy-boto3-payment-cryptography (>=1.28.0,<1.29.0)", "mypy-boto3-payment-cryptography-data (>=1.28.0,<1.29.0)", "mypy-boto3-pca-connector-ad (>=1.28.0,<1.29.0)", "mypy-boto3-personalize (>=1.28.0,<1.29.0)", "mypy-boto3-personalize-events (>=1.28.0,<1.29.0)", "mypy-boto3-personalize-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-pi (>=1.28.0,<1.29.0)", 
"mypy-boto3-pinpoint (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-email (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-sms-voice (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.28.0,<1.29.0)", "mypy-boto3-pipes (>=1.28.0,<1.29.0)", "mypy-boto3-polly (>=1.28.0,<1.29.0)", "mypy-boto3-pricing (>=1.28.0,<1.29.0)", "mypy-boto3-privatenetworks (>=1.28.0,<1.29.0)", "mypy-boto3-proton (>=1.28.0,<1.29.0)", "mypy-boto3-qldb (>=1.28.0,<1.29.0)", "mypy-boto3-qldb-session (>=1.28.0,<1.29.0)", "mypy-boto3-quicksight (>=1.28.0,<1.29.0)", "mypy-boto3-ram (>=1.28.0,<1.29.0)", "mypy-boto3-rbin (>=1.28.0,<1.29.0)", "mypy-boto3-rds (>=1.28.0,<1.29.0)", "mypy-boto3-rds-data (>=1.28.0,<1.29.0)", "mypy-boto3-redshift (>=1.28.0,<1.29.0)", "mypy-boto3-redshift-data (>=1.28.0,<1.29.0)", "mypy-boto3-redshift-serverless (>=1.28.0,<1.29.0)", "mypy-boto3-rekognition (>=1.28.0,<1.29.0)", "mypy-boto3-resiliencehub (>=1.28.0,<1.29.0)", "mypy-boto3-resource-explorer-2 (>=1.28.0,<1.29.0)", "mypy-boto3-resource-groups (>=1.28.0,<1.29.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.28.0,<1.29.0)", "mypy-boto3-robomaker (>=1.28.0,<1.29.0)", "mypy-boto3-rolesanywhere (>=1.28.0,<1.29.0)", "mypy-boto3-route53 (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-cluster (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-control-config (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-readiness (>=1.28.0,<1.29.0)", "mypy-boto3-route53domains (>=1.28.0,<1.29.0)", "mypy-boto3-route53resolver (>=1.28.0,<1.29.0)", "mypy-boto3-rum (>=1.28.0,<1.29.0)", "mypy-boto3-s3 (>=1.28.0,<1.29.0)", "mypy-boto3-s3control (>=1.28.0,<1.29.0)", "mypy-boto3-s3outposts (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-edge (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-geospatial (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-metrics (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-savingsplans (>=1.28.0,<1.29.0)", "mypy-boto3-scheduler (>=1.28.0,<1.29.0)", "mypy-boto3-schemas (>=1.28.0,<1.29.0)", "mypy-boto3-sdb (>=1.28.0,<1.29.0)", "mypy-boto3-secretsmanager (>=1.28.0,<1.29.0)", "mypy-boto3-securityhub (>=1.28.0,<1.29.0)", "mypy-boto3-securitylake (>=1.28.0,<1.29.0)", "mypy-boto3-serverlessrepo (>=1.28.0,<1.29.0)", "mypy-boto3-service-quotas (>=1.28.0,<1.29.0)", "mypy-boto3-servicecatalog (>=1.28.0,<1.29.0)", "mypy-boto3-servicecatalog-appregistry (>=1.28.0,<1.29.0)", "mypy-boto3-servicediscovery (>=1.28.0,<1.29.0)", "mypy-boto3-ses (>=1.28.0,<1.29.0)", "mypy-boto3-sesv2 (>=1.28.0,<1.29.0)", "mypy-boto3-shield (>=1.28.0,<1.29.0)", "mypy-boto3-signer (>=1.28.0,<1.29.0)", "mypy-boto3-simspaceweaver (>=1.28.0,<1.29.0)", "mypy-boto3-sms (>=1.28.0,<1.29.0)", "mypy-boto3-sms-voice (>=1.28.0,<1.29.0)", "mypy-boto3-snow-device-management (>=1.28.0,<1.29.0)", "mypy-boto3-snowball (>=1.28.0,<1.29.0)", "mypy-boto3-sns (>=1.28.0,<1.29.0)", "mypy-boto3-sqs (>=1.28.0,<1.29.0)", "mypy-boto3-ssm (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-contacts (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-incidents (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-sap (>=1.28.0,<1.29.0)", "mypy-boto3-sso (>=1.28.0,<1.29.0)", "mypy-boto3-sso-admin (>=1.28.0,<1.29.0)", "mypy-boto3-sso-oidc (>=1.28.0,<1.29.0)", "mypy-boto3-stepfunctions (>=1.28.0,<1.29.0)", "mypy-boto3-storagegateway (>=1.28.0,<1.29.0)", "mypy-boto3-sts (>=1.28.0,<1.29.0)", "mypy-boto3-support (>=1.28.0,<1.29.0)", "mypy-boto3-support-app (>=1.28.0,<1.29.0)", 
"mypy-boto3-swf (>=1.28.0,<1.29.0)", "mypy-boto3-synthetics (>=1.28.0,<1.29.0)", "mypy-boto3-textract (>=1.28.0,<1.29.0)", "mypy-boto3-timestream-query (>=1.28.0,<1.29.0)", "mypy-boto3-timestream-write (>=1.28.0,<1.29.0)", "mypy-boto3-tnb (>=1.28.0,<1.29.0)", "mypy-boto3-transcribe (>=1.28.0,<1.29.0)", "mypy-boto3-transfer (>=1.28.0,<1.29.0)", "mypy-boto3-translate (>=1.28.0,<1.29.0)", "mypy-boto3-verifiedpermissions (>=1.28.0,<1.29.0)", "mypy-boto3-voice-id (>=1.28.0,<1.29.0)", "mypy-boto3-vpc-lattice (>=1.28.0,<1.29.0)", "mypy-boto3-waf (>=1.28.0,<1.29.0)", "mypy-boto3-waf-regional (>=1.28.0,<1.29.0)", "mypy-boto3-wafv2 (>=1.28.0,<1.29.0)", "mypy-boto3-wellarchitected (>=1.28.0,<1.29.0)", "mypy-boto3-wisdom (>=1.28.0,<1.29.0)", "mypy-boto3-workdocs (>=1.28.0,<1.29.0)", "mypy-boto3-worklink (>=1.28.0,<1.29.0)", "mypy-boto3-workmail (>=1.28.0,<1.29.0)", "mypy-boto3-workmailmessageflow (>=1.28.0,<1.29.0)", "mypy-boto3-workspaces (>=1.28.0,<1.29.0)", "mypy-boto3-workspaces-web (>=1.28.0,<1.29.0)", "mypy-boto3-xray (>=1.28.0,<1.29.0)"] +amp = ["mypy-boto3-amp (>=1.28.0,<1.29.0)"] +amplify = ["mypy-boto3-amplify (>=1.28.0,<1.29.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.28.0,<1.29.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.28.0,<1.29.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.28.0,<1.29.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.28.0,<1.29.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.28.0,<1.29.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.28.0,<1.29.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.28.0,<1.29.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.28.0,<1.29.0)"] +appflow = ["mypy-boto3-appflow (>=1.28.0,<1.29.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.28.0,<1.29.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.28.0,<1.29.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.28.0,<1.29.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.28.0,<1.29.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.28.0,<1.29.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.28.0,<1.29.0)"] +appstream = ["mypy-boto3-appstream (>=1.28.0,<1.29.0)"] +appsync = ["mypy-boto3-appsync (>=1.28.0,<1.29.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.28.0,<1.29.0)"] +athena = ["mypy-boto3-athena (>=1.28.0,<1.29.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.28.0,<1.29.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.28.0,<1.29.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.28.0,<1.29.0)"] +backup = ["mypy-boto3-backup (>=1.28.0,<1.29.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.28.0,<1.29.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.28.0,<1.29.0)"] +batch = ["mypy-boto3-batch (>=1.28.0,<1.29.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.28.0,<1.29.0)"] +boto3 = ["boto3 (==1.28.52)", "botocore (==1.31.52)"] +braket = ["mypy-boto3-braket (>=1.28.0,<1.29.0)"] +budgets = ["mypy-boto3-budgets (>=1.28.0,<1.29.0)"] +ce = ["mypy-boto3-ce (>=1.28.0,<1.29.0)"] +chime = ["mypy-boto3-chime (>=1.28.0,<1.29.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.28.0,<1.29.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.28.0,<1.29.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.28.0,<1.29.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.28.0,<1.29.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.28.0,<1.29.0)"] +cleanrooms = 
["mypy-boto3-cleanrooms (>=1.28.0,<1.29.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.28.0,<1.29.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.28.0,<1.29.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.28.0,<1.29.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.28.0,<1.29.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.28.0,<1.29.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.28.0,<1.29.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.28.0,<1.29.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.28.0,<1.29.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.28.0,<1.29.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.28.0,<1.29.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.28.0,<1.29.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.28.0,<1.29.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.28.0,<1.29.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.28.0,<1.29.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.28.0,<1.29.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.28.0,<1.29.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.28.0,<1.29.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.28.0,<1.29.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.28.0,<1.29.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.28.0,<1.29.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.28.0,<1.29.0)"] +codestar = ["mypy-boto3-codestar (>=1.28.0,<1.29.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.28.0,<1.29.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.28.0,<1.29.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.28.0,<1.29.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.28.0,<1.29.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.28.0,<1.29.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.28.0,<1.29.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.28.0,<1.29.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.28.0,<1.29.0)"] +config = ["mypy-boto3-config (>=1.28.0,<1.29.0)"] +connect = ["mypy-boto3-connect (>=1.28.0,<1.29.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.28.0,<1.29.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.28.0,<1.29.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.28.0,<1.29.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.28.0,<1.29.0)"] +controltower = ["mypy-boto3-controltower (>=1.28.0,<1.29.0)"] +cur = ["mypy-boto3-cur (>=1.28.0,<1.29.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.28.0,<1.29.0)"] +databrew = ["mypy-boto3-databrew (>=1.28.0,<1.29.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.28.0,<1.29.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.28.0,<1.29.0)"] +datasync = ["mypy-boto3-datasync (>=1.28.0,<1.29.0)"] +dax = ["mypy-boto3-dax (>=1.28.0,<1.29.0)"] +detective = ["mypy-boto3-detective (>=1.28.0,<1.29.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.28.0,<1.29.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.28.0,<1.29.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.28.0,<1.29.0)"] +discovery = ["mypy-boto3-discovery (>=1.28.0,<1.29.0)"] +dlm = ["mypy-boto3-dlm (>=1.28.0,<1.29.0)"] +dms = ["mypy-boto3-dms (>=1.28.0,<1.29.0)"] +docdb = ["mypy-boto3-docdb (>=1.28.0,<1.29.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.28.0,<1.29.0)"] +drs = ["mypy-boto3-drs (>=1.28.0,<1.29.0)"] +ds = ["mypy-boto3-ds (>=1.28.0,<1.29.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.28.0,<1.29.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.28.0,<1.29.0)"] 
+ebs = ["mypy-boto3-ebs (>=1.28.0,<1.29.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.28.0,<1.29.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.28.0,<1.29.0)"] +ecr = ["mypy-boto3-ecr (>=1.28.0,<1.29.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.28.0,<1.29.0)"] +ecs = ["mypy-boto3-ecs (>=1.28.0,<1.29.0)"] +efs = ["mypy-boto3-efs (>=1.28.0,<1.29.0)"] +eks = ["mypy-boto3-eks (>=1.28.0,<1.29.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.28.0,<1.29.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.28.0,<1.29.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.28.0,<1.29.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.28.0,<1.29.0)"] +elb = ["mypy-boto3-elb (>=1.28.0,<1.29.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.28.0,<1.29.0)"] +emr = ["mypy-boto3-emr (>=1.28.0,<1.29.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.28.0,<1.29.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.28.0,<1.29.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.28.0,<1.29.0)"] +es = ["mypy-boto3-es (>=1.28.0,<1.29.0)"] +essential = ["mypy-boto3-cloudformation (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodb (>=1.28.0,<1.29.0)", "mypy-boto3-ec2 (>=1.28.0,<1.29.0)", "mypy-boto3-lambda (>=1.28.0,<1.29.0)", "mypy-boto3-rds (>=1.28.0,<1.29.0)", "mypy-boto3-s3 (>=1.28.0,<1.29.0)", "mypy-boto3-sqs (>=1.28.0,<1.29.0)"] +events = ["mypy-boto3-events (>=1.28.0,<1.29.0)"] +evidently = ["mypy-boto3-evidently (>=1.28.0,<1.29.0)"] +finspace = ["mypy-boto3-finspace (>=1.28.0,<1.29.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.28.0,<1.29.0)"] +firehose = ["mypy-boto3-firehose (>=1.28.0,<1.29.0)"] +fis = ["mypy-boto3-fis (>=1.28.0,<1.29.0)"] +fms = ["mypy-boto3-fms (>=1.28.0,<1.29.0)"] +forecast = ["mypy-boto3-forecast (>=1.28.0,<1.29.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.28.0,<1.29.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.28.0,<1.29.0)"] +fsx = ["mypy-boto3-fsx (>=1.28.0,<1.29.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.28.0,<1.29.0)"] +gamesparks = ["mypy-boto3-gamesparks (>=1.28.0,<1.29.0)"] +glacier = ["mypy-boto3-glacier (>=1.28.0,<1.29.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.28.0,<1.29.0)"] +glue = ["mypy-boto3-glue (>=1.28.0,<1.29.0)"] +grafana = ["mypy-boto3-grafana (>=1.28.0,<1.29.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.28.0,<1.29.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.28.0,<1.29.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.28.0,<1.29.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.28.0,<1.29.0)"] +health = ["mypy-boto3-health (>=1.28.0,<1.29.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.28.0,<1.29.0)"] +honeycode = ["mypy-boto3-honeycode (>=1.28.0,<1.29.0)"] +iam = ["mypy-boto3-iam (>=1.28.0,<1.29.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.28.0,<1.29.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.28.0,<1.29.0)"] +importexport = ["mypy-boto3-importexport (>=1.28.0,<1.29.0)"] +inspector = ["mypy-boto3-inspector (>=1.28.0,<1.29.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.28.0,<1.29.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.28.0,<1.29.0)"] +iot = ["mypy-boto3-iot (>=1.28.0,<1.29.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.28.0,<1.29.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.28.0,<1.29.0)"] +iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.28.0,<1.29.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.28.0,<1.29.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.28.0,<1.29.0)"] +iotanalytics = 
["mypy-boto3-iotanalytics (>=1.28.0,<1.29.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.28.0,<1.29.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.28.0,<1.29.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.28.0,<1.29.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.28.0,<1.29.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.28.0,<1.29.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.28.0,<1.29.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.28.0,<1.29.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.28.0,<1.29.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.28.0,<1.29.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.28.0,<1.29.0)"] +ivs = ["mypy-boto3-ivs (>=1.28.0,<1.29.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.28.0,<1.29.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.28.0,<1.29.0)"] +kafka = ["mypy-boto3-kafka (>=1.28.0,<1.29.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.28.0,<1.29.0)"] +kendra = ["mypy-boto3-kendra (>=1.28.0,<1.29.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.28.0,<1.29.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.28.0,<1.29.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.28.0,<1.29.0)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.28.0,<1.29.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.28.0,<1.29.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.28.0,<1.29.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.28.0,<1.29.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.28.0,<1.29.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.28.0,<1.29.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.28.0,<1.29.0)"] +kms = ["mypy-boto3-kms (>=1.28.0,<1.29.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.28.0,<1.29.0)"] +lambda = ["mypy-boto3-lambda (>=1.28.0,<1.29.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.28.0,<1.29.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.28.0,<1.29.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.28.0,<1.29.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.28.0,<1.29.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.28.0,<1.29.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.28.0,<1.29.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.28.0,<1.29.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.28.0,<1.29.0)"] +location = ["mypy-boto3-location (>=1.28.0,<1.29.0)"] +logs = ["mypy-boto3-logs (>=1.28.0,<1.29.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.28.0,<1.29.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.28.0,<1.29.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.28.0,<1.29.0)"] +m2 = ["mypy-boto3-m2 (>=1.28.0,<1.29.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.28.0,<1.29.0)"] +macie = ["mypy-boto3-macie (>=1.28.0,<1.29.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.28.0,<1.29.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.28.0,<1.29.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.28.0,<1.29.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.28.0,<1.29.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.28.0,<1.29.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.28.0,<1.29.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.28.0,<1.29.0)"] +mediaconvert = ["mypy-boto3-mediaconvert 
(>=1.28.0,<1.29.0)"] +medialive = ["mypy-boto3-medialive (>=1.28.0,<1.29.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.28.0,<1.29.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.28.0,<1.29.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.28.0,<1.29.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.28.0,<1.29.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.28.0,<1.29.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.28.0,<1.29.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.28.0,<1.29.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.28.0,<1.29.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.28.0,<1.29.0)"] +mgh = ["mypy-boto3-mgh (>=1.28.0,<1.29.0)"] +mgn = ["mypy-boto3-mgn (>=1.28.0,<1.29.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.28.0,<1.29.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.28.0,<1.29.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.28.0,<1.29.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.28.0,<1.29.0)"] +mobile = ["mypy-boto3-mobile (>=1.28.0,<1.29.0)"] +mq = ["mypy-boto3-mq (>=1.28.0,<1.29.0)"] +mturk = ["mypy-boto3-mturk (>=1.28.0,<1.29.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.28.0,<1.29.0)"] +neptune = ["mypy-boto3-neptune (>=1.28.0,<1.29.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.28.0,<1.29.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.28.0,<1.29.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.28.0,<1.29.0)"] +nimble = ["mypy-boto3-nimble (>=1.28.0,<1.29.0)"] +oam = ["mypy-boto3-oam (>=1.28.0,<1.29.0)"] +omics = ["mypy-boto3-omics (>=1.28.0,<1.29.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.28.0,<1.29.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.28.0,<1.29.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.28.0,<1.29.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.28.0,<1.29.0)"] +organizations = ["mypy-boto3-organizations (>=1.28.0,<1.29.0)"] +osis = ["mypy-boto3-osis (>=1.28.0,<1.29.0)"] +outposts = ["mypy-boto3-outposts (>=1.28.0,<1.29.0)"] +panorama = ["mypy-boto3-panorama (>=1.28.0,<1.29.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.28.0,<1.29.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.28.0,<1.29.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.28.0,<1.29.0)"] +personalize = ["mypy-boto3-personalize (>=1.28.0,<1.29.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.28.0,<1.29.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.28.0,<1.29.0)"] +pi = ["mypy-boto3-pi (>=1.28.0,<1.29.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.28.0,<1.29.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.28.0,<1.29.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.28.0,<1.29.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.28.0,<1.29.0)"] +pipes = ["mypy-boto3-pipes (>=1.28.0,<1.29.0)"] +polly = ["mypy-boto3-polly (>=1.28.0,<1.29.0)"] +pricing = ["mypy-boto3-pricing (>=1.28.0,<1.29.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.28.0,<1.29.0)"] +proton = ["mypy-boto3-proton (>=1.28.0,<1.29.0)"] +qldb = ["mypy-boto3-qldb (>=1.28.0,<1.29.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.28.0,<1.29.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.28.0,<1.29.0)"] +ram = ["mypy-boto3-ram (>=1.28.0,<1.29.0)"] +rbin = ["mypy-boto3-rbin (>=1.28.0,<1.29.0)"] +rds = ["mypy-boto3-rds (>=1.28.0,<1.29.0)"] +rds-data = ["mypy-boto3-rds-data 
(>=1.28.0,<1.29.0)"] +redshift = ["mypy-boto3-redshift (>=1.28.0,<1.29.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.28.0,<1.29.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.28.0,<1.29.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.28.0,<1.29.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.28.0,<1.29.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.28.0,<1.29.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.28.0,<1.29.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.28.0,<1.29.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.28.0,<1.29.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.28.0,<1.29.0)"] +route53 = ["mypy-boto3-route53 (>=1.28.0,<1.29.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.28.0,<1.29.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.28.0,<1.29.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.28.0,<1.29.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.28.0,<1.29.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.28.0,<1.29.0)"] +rum = ["mypy-boto3-rum (>=1.28.0,<1.29.0)"] +s3 = ["mypy-boto3-s3 (>=1.28.0,<1.29.0)"] +s3control = ["mypy-boto3-s3control (>=1.28.0,<1.29.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.28.0,<1.29.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.28.0,<1.29.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.28.0,<1.29.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.28.0,<1.29.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.28.0,<1.29.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.28.0,<1.29.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.28.0,<1.29.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.28.0,<1.29.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.28.0,<1.29.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.28.0,<1.29.0)"] +schemas = ["mypy-boto3-schemas (>=1.28.0,<1.29.0)"] +sdb = ["mypy-boto3-sdb (>=1.28.0,<1.29.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.28.0,<1.29.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.28.0,<1.29.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.28.0,<1.29.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.28.0,<1.29.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.28.0,<1.29.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.28.0,<1.29.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.28.0,<1.29.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.28.0,<1.29.0)"] +ses = ["mypy-boto3-ses (>=1.28.0,<1.29.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.28.0,<1.29.0)"] +shield = ["mypy-boto3-shield (>=1.28.0,<1.29.0)"] +signer = ["mypy-boto3-signer (>=1.28.0,<1.29.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.28.0,<1.29.0)"] +sms = ["mypy-boto3-sms (>=1.28.0,<1.29.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.28.0,<1.29.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.28.0,<1.29.0)"] +snowball = ["mypy-boto3-snowball (>=1.28.0,<1.29.0)"] +sns = ["mypy-boto3-sns (>=1.28.0,<1.29.0)"] +sqs = ["mypy-boto3-sqs (>=1.28.0,<1.29.0)"] +ssm = ["mypy-boto3-ssm (>=1.28.0,<1.29.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.28.0,<1.29.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.28.0,<1.29.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.28.0,<1.29.0)"] +sso = ["mypy-boto3-sso (>=1.28.0,<1.29.0)"] +sso-admin = 
["mypy-boto3-sso-admin (>=1.28.0,<1.29.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.28.0,<1.29.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.28.0,<1.29.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.28.0,<1.29.0)"] +sts = ["mypy-boto3-sts (>=1.28.0,<1.29.0)"] +support = ["mypy-boto3-support (>=1.28.0,<1.29.0)"] +support-app = ["mypy-boto3-support-app (>=1.28.0,<1.29.0)"] +swf = ["mypy-boto3-swf (>=1.28.0,<1.29.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.28.0,<1.29.0)"] +textract = ["mypy-boto3-textract (>=1.28.0,<1.29.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.28.0,<1.29.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.28.0,<1.29.0)"] +tnb = ["mypy-boto3-tnb (>=1.28.0,<1.29.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.28.0,<1.29.0)"] +transfer = ["mypy-boto3-transfer (>=1.28.0,<1.29.0)"] +translate = ["mypy-boto3-translate (>=1.28.0,<1.29.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.28.0,<1.29.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.28.0,<1.29.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.28.0,<1.29.0)"] +waf = ["mypy-boto3-waf (>=1.28.0,<1.29.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.28.0,<1.29.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.28.0,<1.29.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.28.0,<1.29.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.28.0,<1.29.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.28.0,<1.29.0)"] +worklink = ["mypy-boto3-worklink (>=1.28.0,<1.29.0)"] +workmail = ["mypy-boto3-workmail (>=1.28.0,<1.29.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.28.0,<1.29.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.28.0,<1.29.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.28.0,<1.29.0)"] +xray = ["mypy-boto3-xray (>=1.28.0,<1.29.0)"] [[package]] name = "botocore" -version = "1.21.65" +version = "1.31.52" description = "Low-level, data-driven core of boto 3." 
optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" files = [ - {file = "botocore-1.21.65-py3-none-any.whl", hash = "sha256:3bd0e3d6daee6afcc747d596b52158519abe1ce36f906d556b9f8b54faa081e8"}, - {file = "botocore-1.21.65.tar.gz", hash = "sha256:6437d6a3999a189e7d45b3fcd8f794a46670fb255ae670c946d3f224caa8b46a"}, + {file = "botocore-1.31.52-py3-none-any.whl", hash = "sha256:46b0a75a38521aa6a75fddccb1542e002930e609d4e13516f40fef170d32e515"}, + {file = "botocore-1.31.52.tar.gz", hash = "sha256:6d09881c5a8be34b497872ca3936f8757d886a6f42f2a8703411928189cfedc0"}, ] [package.dependencies] -jmespath = ">=0.7.1,<1.0.0" +jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.12.5)"] +crt = ["awscrt (==0.16.26)"] [[package]] name = "botocore-stubs" @@ -851,6 +865,94 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "dependency-injector" +version = "4.41.0" +description = "Dependency injection framework for Python" +optional = false +python-versions = "*" +files = [ + {file = "dependency-injector-4.41.0.tar.gz", hash = "sha256:939dfc657104bc3e66b67afd3fb2ebb0850c9a1e73d0d26066f2bbdd8735ff9c"}, + {file = "dependency_injector-4.41.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2381a251b04244125148298212550750e6e1403e9b2850cc62e0e829d050ad3"}, + {file = "dependency_injector-4.41.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75280dfa23f7c88e1bf56c3920d58a43516816de6f6ab2a6650bb8a0f27d5c2c"}, + {file = "dependency_injector-4.41.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63bfba21f8bff654a80e9b9d06dd6c43a442990b73bf89cd471314c11c541ec2"}, + {file = "dependency_injector-4.41.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3535d06416251715b45f8412482b58ec1c6196a4a3baa207f947f0b03a7c4b44"}, + {file = "dependency_injector-4.41.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d09c08c944a25dabfb454238c1a889acd85102b93ae497de523bf9ab7947b28a"}, + {file = "dependency_injector-4.41.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:586a0821720b15932addbefb00f7370fbcd5831d6ebbd6494d774b44ff96d23a"}, + {file = "dependency_injector-4.41.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7fa4970f12a3fc95d8796938b11c41276ad1ff4c447b0e589212eab3fc527a90"}, + {file = "dependency_injector-4.41.0-cp310-cp310-win32.whl", hash = "sha256:d557e40673de984f78dab13ebd68d27fbb2f16d7c4e3b663ea2fa2f9fae6765b"}, + {file = "dependency_injector-4.41.0-cp310-cp310-win_amd64.whl", hash = "sha256:3744c327d18408e74781bd6d8b7738745ee80ef89f2c8daecf9ebd098cb84972"}, + {file = "dependency_injector-4.41.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:89c67edffe7007cf33cee79ecbca38f48efcc2add5c280717af434db6c789377"}, + {file = "dependency_injector-4.41.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:786f7aac592e191c9caafc47732161d807bad65c62f260cd84cd73c7e2d67d6d"}, + {file = "dependency_injector-4.41.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b61a15bc46a3aa7b29bd8a7384b650aa3a7ef943491e93c49a0540a0b3dda4"}, + {file = "dependency_injector-4.41.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a4f113e5d4c3070973ad76e5bda7317e500abae6083d78689f0b6e37cf403abf"}, + {file = "dependency_injector-4.41.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fa3ed8f0700e47a0e7363f949b4525ffa8277aa1c5b10ca5b41fce4dea61bb9"}, + {file = "dependency_injector-4.41.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e15ea0f2b14c1127e8b0d1597fef13f98845679f63bf670ba12dbfc12a16ef"}, + {file = "dependency_injector-4.41.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3055b3fc47a0d6e5f27defb4166c0d37543a4967c279549b154afaf506ce6efc"}, + {file = "dependency_injector-4.41.0-cp311-cp311-win32.whl", hash = "sha256:37d5954026e3831663518d78bdf4be9c2dbfea691edcb73c813aa3093aa4363a"}, + {file = "dependency_injector-4.41.0-cp311-cp311-win_amd64.whl", hash = "sha256:f89a507e389b7e4d4892dd9a6f5f4da25849e24f73275478634ac594d621ab3f"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ac79f3c05747f9724bd56c06985e78331fc6c85eb50f3e3f1a35e0c60f9977e9"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75e7a733b372db3144a34020c4233f6b94db2c6342d6d16bc5245b1b941ee2bd"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40936d9384363331910abd59dd244158ec3572abf9d37322f15095315ac99893"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a31d9d60be4b585585081109480cfb2ef564d3b851cb32a139bf8408411a93a"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:953bfac819d32dc72b963767589e0ed372e5e9e78b03fb6b89419d0500d34bbe"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8f0090ff14038f17a026ca408a3a0b0e7affb6aa7498b2b59d670f40ac970fbe"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6b29abac56ce347d2eb58a560723e1663ee2125cf5cc38866ed92b84319927ec"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-win32.whl", hash = "sha256:059fbb48333148143e8667a5323d162628dfe27c386bd0ed3deeecfc390338bf"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-win_amd64.whl", hash = "sha256:16de2797dcfcc2263b8672bf0751166f7c7b369ca2ff9246ceb67b65f8e1d802"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c71d30b6708438050675f338edb9a25bea6c258478dbe5ec8405286756a2d347"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d283aee588a72072439e6721cb64aa6cba5bc18c576ef0ab28285a6ec7a9d655"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc852da612c7e347f2fcf921df2eca2718697a49f648a28a63db3ab504fd9510"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02620454ee8101f77a317f3229935ce687480883d72a40858ff4b0c87c935cce"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7a92680bea1c260e5c0d2d6cd60b0c913cba76a456a147db5ac047ecfcfcc758"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:168334cba3f1cbf55299ef38f0f2e31879115cc767b780c859f7814a52d80abb"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:48b6886a87b4ceb9b9f78550f77b2a5c7d2ce33bc83efd886556ad468cc9c85a"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-win32.whl", hash = "sha256:87be84084a1b922c4ba15e2e5aa900ee24b78a5467997cb7aec0a1d6cdb4a00b"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8b8cf1c6c56f5c18bdbd9f5e93b52ca29cb4d99606d4056e91f0c761eef496dc"}, + {file = "dependency_injector-4.41.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a8686fa330c83251c75c8238697686f7a0e0f6d40658538089165dc72df9bcff"}, + {file = "dependency_injector-4.41.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d670a844268dcd758195e58e9a5b39fc74bb8648aba99a13135a4a10ec9cfac"}, + {file = "dependency_injector-4.41.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3b9d41e0eff4c8e16fea1e33de66ff0030fe51137ca530f3c52ce110447914"}, + {file = "dependency_injector-4.41.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a724e0a737baadb4378f5dc1b079867cc3a88552fcca719b3dba84716828b2"}, + {file = "dependency_injector-4.41.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3588bd887b051d16b8bcabaae1127eb14059a0719a8fe34c8a75ba59321b352c"}, + {file = "dependency_injector-4.41.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:409441122f40e1b4b8582845fdd76deb9dc5c9d6eb74a057b85736ef9e9c671f"}, + {file = "dependency_injector-4.41.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7dcba8665cafec825b7095d5dd80afb5cf14404450eca3fe8b66e1edbf4dbc10"}, + {file = "dependency_injector-4.41.0-cp38-cp38-win32.whl", hash = "sha256:8b51efeaebacaf79ef68edfc65e9687699ccffb3538c4a3ab30d0d77e2db7189"}, + {file = "dependency_injector-4.41.0-cp38-cp38-win_amd64.whl", hash = "sha256:1662e2ef60ac6e681b9e11b5d8b7c17a0f733688916cf695f9540f8f50a61b1e"}, + {file = "dependency_injector-4.41.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51217cb384b468d7cc355544cec20774859f00812f9a1a71ed7fa701c957b2a7"}, + {file = "dependency_injector-4.41.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3890a12423ae3a9eade035093beba487f8d092ee6c6cb8706f4e7080a56e819"}, + {file = "dependency_injector-4.41.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99ed73b1521bf249e2823a08a730c9f9413a58f4b4290da022e0ad4fb333ba3d"}, + {file = "dependency_injector-4.41.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:300838e9d4f3fbf539892a5a4072851728e23b37a1f467afcf393edd994d88f0"}, + {file = "dependency_injector-4.41.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56d37b9d2f50a18f059d9abdbea7669a7518bd42b81603c21a27910a2b3f1657"}, + {file = "dependency_injector-4.41.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4a44ca3ce5867513a70b31855b218be3d251f5068ce1c480cc3a4ad24ffd3280"}, + {file = "dependency_injector-4.41.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:67b369592c57549ccdcad0d5fef1ddb9d39af7fed8083d76e789ab0111fc6389"}, + {file = "dependency_injector-4.41.0-cp39-cp39-win32.whl", hash = "sha256:740a8e8106a04d3f44b52b25b80570fdac96a8a3934423de7c9202c5623e7936"}, + {file = "dependency_injector-4.41.0-cp39-cp39-win_amd64.whl", hash = "sha256:22b11dbf696e184f0b3d5ac4e5418aeac3c379ba4ea758c04a83869b7e5d1cbf"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b365a8548e9a49049fa6acb24d3cd939f619eeb8e300ca3e156e44402dcc07ec"}, + {file = 
"dependency_injector-4.41.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5168dc59808317dc4cdd235aa5d7d556d33e5600156acaf224cead236b48a3e8"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3229d83e99e255451605d5276604386e06ad948e3d60f31ddd796781c77f76f"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1baee908f21190bdc46a65ce4c417a5175e9397ca62354928694fce218f84487"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b37f36ecb0c1227f697e1d4a029644e3eda8dd0f0716aa63ad04d96dbb15bbbb"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b0c9c966ff66c77364a2d43d08de9968aff7e3903938fe912ba49796b2133344"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12e91ac0333e7e589421943ff6c6bf9cf0d9ac9703301cec37ccff3723406332"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2440b32474d4e747209528ca3ae48f42563b2fbe3d74dbfe949c11dfbfef7c4"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54032d62610cf2f4421c9d92cef52957215aaa0bca403cda580c58eb3f726eda"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:76b94c8310929e54136f3cb3de3adc86d1a657b3984299f40bf1cd2ba0bae548"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6ee9810841c6e0599356cb884d16453bfca6ab739d0e4f0248724ed8f9ee0d79"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b98945edae88e777091bf0848f869fb94bd76dfa4066d7c870a5caa933391d0"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2dee5d4abdd21f1a30a51d46645c095be9dcc404c7c6e9f81d0a01415a49e64"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d03f5fa0fa98a18bd0dfce846db80e2798607f0b861f1f99c97f441f7669d7a2"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f2842e15bae664a9f69932e922b02afa055c91efec959cb1896f6c499bf68180"}, +] + +[package.dependencies] +six = ">=1.7.0,<=1.16.0" + +[package.extras] +aiohttp = ["aiohttp"] +flask = ["flask"] +pydantic = ["pydantic"] +yaml = ["pyyaml"] + [[package]] name = "deprecated" version = "1.2.14" @@ -2341,6 +2443,104 @@ dmypy = ["psutil (>=4.0)"] install-types = ["pip"] reports = ["lxml"] +[[package]] +name = "mypy-boto3-cloudformation" +version = "1.28.48" +description = "Type annotations for boto3.CloudFormation 1.28.48 service generated with mypy-boto3-builder 7.19.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-cloudformation-1.28.48.tar.gz", hash = "sha256:efbe4073397800824287c8d52a65383112862f903b16fd587b5113449652371b"}, + {file = "mypy_boto3_cloudformation-1.28.48-py3-none-any.whl", hash = "sha256:653e14414abc9fab8d29d693f138639a1325322fb6ba1b06ca90a6ccb11dfd94"}, +] + +[package.dependencies] 
+typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-dynamodb" +version = "1.28.36" +description = "Type annotations for boto3.DynamoDB 1.28.36 service generated with mypy-boto3-builder 7.18.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-dynamodb-1.28.36.tar.gz", hash = "sha256:5fe1d336fdc8c58f345c9c1b4e4c1a2d164660531cf3a074d4598975fb2687de"}, + {file = "mypy_boto3_dynamodb-1.28.36-py3-none-any.whl", hash = "sha256:9a3b49385d17e421661ab8639fc09cc64a706198be20287f82d83511289294a3"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-ec2" +version = "1.28.51" +description = "Type annotations for boto3.EC2 1.28.51 service generated with mypy-boto3-builder 7.19.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-ec2-1.28.51.tar.gz", hash = "sha256:e62204a712e89a14e0663d3d18813ac8072706684b0e89a1b04a7d01c10f9ebe"}, + {file = "mypy_boto3_ec2-1.28.51-py3-none-any.whl", hash = "sha256:f9162f00b144cf2fab5b8b56d7883674b1d4096c79a7226b592918eae17e6235"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-lambda" +version = "1.28.36" +description = "Type annotations for boto3.Lambda 1.28.36 service generated with mypy-boto3-builder 7.18.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-lambda-1.28.36.tar.gz", hash = "sha256:70498e6ff6bfd60b758553d27fadf691ba169572faca01c2bd457da0b48b9cff"}, + {file = "mypy_boto3_lambda-1.28.36-py3-none-any.whl", hash = "sha256:edb1f49279f7713929a70eaab00cf3d4ba65a10016db636805d022b2eaf14c84"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-rds" +version = "1.28.41" +description = "Type annotations for boto3.RDS 1.28.41 service generated with mypy-boto3-builder 7.18.2" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-rds-1.28.41.tar.gz", hash = "sha256:19b7d6a1e3e53ff7d03157052885ceead29861fba2d9cbddf701f6238159cb72"}, + {file = "mypy_boto3_rds-1.28.41-py3-none-any.whl", hash = "sha256:2935be3736147b726964e3cebcb918d0fc394e4012b9b7d3d0d35c12aa740ddf"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-s3" +version = "1.28.52" +description = "Type annotations for boto3.S3 1.28.52 service generated with mypy-boto3-builder 7.19.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-s3-1.28.52.tar.gz", hash = "sha256:179cb7542cc5ef656f1323ad51eb237afcba77d1e5ed07d21a013fe36effb8b2"}, + {file = "mypy_boto3_s3-1.28.52-py3-none-any.whl", hash = "sha256:a75cd5ff28f1cb5109dd50db94259436701208fa97c61b5a2cc0689e169b7cba"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-sqs" +version = "1.28.36" +description = "Type annotations for boto3.SQS 1.28.36 service generated with mypy-boto3-builder 7.18.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-sqs-1.28.36.tar.gz", hash = "sha256:d9c159e020f0ef225a6d5850a3673e8b236327243ba5ffe0d13762ae4fdc0e21"}, + {file = "mypy_boto3_sqs-1.28.36-py3-none-any.whl", hash = 
"sha256:8457aa9f2a6da44e8543e547597773f67a04e517f6a398989117cf1fa3f70d6e"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -2838,6 +3038,7 @@ files = [ [package.dependencies] email-validator = {version = ">=1.0.3", optional = true, markers = "extra == \"email\""} +python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""} typing-extensions = ">=4.2.0" [package.extras] @@ -3173,6 +3374,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "python3-saml" version = "1.15.0" @@ -3568,13 +3783,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.5.2" +version = "0.6.2" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, - {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, + {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"}, + {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"}, ] [package.dependencies] @@ -4227,4 +4442,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "efb9ed885d265cf45f832527246ad2393306e46e0c8ffe72da9af0dab19dadfa" +content-hash = "4a0a1f9123bc6217c1c2427e0a217e605497e140f5cb98c916c17b4209b82467" diff --git a/pyproject.toml b/pyproject.toml index 58081cafa6..131250f319 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,6 +85,7 @@ module = [ "core.model.integration", "core.model.library", "core.selftest", + "core.service.*", "core.settings.*", "core.util.authentication_for_opds", "core.util.cache", @@ -162,11 +163,11 @@ version = "0" # Version number is managed with tags in git alembic = {extras = ["tz"], version = "^1.8.1"} aws-xray-sdk = "~2.12" bcrypt = "^4.0.1" -boto3 = "~1.18" -botocore = "~1.21" +boto3 = "^1.28" certifi = "*" click = "^8.1.3" contextlib2 = "21.6.0" +dependency-injector = "^4.41" expiringdict = "1.2.2" feedparser = "6.0.10" firebase-admin = "^6.0.1" @@ -190,7 +191,7 @@ opensearch-py = "~1.1" palace-webpub-manifest-parser = "~3.0.1" pillow = "^10.0" pycryptodome = "^3.18" -pydantic = {version = "^1.10.9", extras = ["email"]} +pydantic = {version = "^1.10.9", extras = ["dotenv", "email"]} pyinstrument = "<4.6" PyJWT = "^2.8" PyLD = "2.0.3" @@ -229,8 +230,7 @@ tox-docker = "^4.1" tox-gh-actions = "^3.0" [tool.poetry.group.dev.dependencies] -boto3-stubs = "^1.26.81" -botocore-stubs = "^1.29.81" +boto3-stubs = {version = "^1.28", extras = ["boto3", "essential", "s3"]} freezegun = "~1.2.2" Jinja2 = "^3.1.2" mypy = "^1.4.1" diff --git a/scripts.py 
b/scripts.py index 07b2421684..2ea03abea3 100644 --- a/scripts.py +++ b/scripts.py @@ -55,7 +55,6 @@ get_one, pg_advisory_lock, ) -from core.model.configuration import ExternalIntegrationLink from core.scripts import ( IdentifierInputScript, LaneSweeperScript, @@ -194,7 +193,7 @@ def __init__(self, _db=None, cmd_args=None, manager=None, *args, **kwargs): super().__init__(_db, *args, **kwargs) self.parse_args(cmd_args) if not manager: - manager = CirculationManager(self._db) + manager = CirculationManager(self._db, self.services) from api.app import app app.manager = manager @@ -652,26 +651,14 @@ def process_lane(self, lane, exporter=None): ) return - # To find the storage integration for the exporter, first find the - # external integration link associated with the exporter's external - # integration. - integration_link = get_one( - self._db, - ExternalIntegrationLink, - external_integration_id=exporter.integration.id, - purpose=ExternalIntegrationLink.MARC, - ) - # Then use the "other" integration value to find the storage integration. - storage_integration = get_one( - self._db, ExternalIntegration, id=integration_link.other_integration_id - ) - - if not storage_integration: - self.log.info("No storage External Integration was found.") + # Find the storage service + storage_service = self.services.storage.public() + if not storage_service: + self.log.info("No storage service was found.") return # First update the file with ALL the records. - records = exporter.records(lane, annotator, storage_integration) + records = exporter.records(lane, annotator, storage_service) # Then create a new file with changes since the last update. start_time = None @@ -680,7 +667,7 @@ def process_lane(self, lane, exporter=None): start_time = last_update - timedelta(days=1) records = exporter.records( - lane, annotator, storage_integration, start_time=start_time + lane, annotator, storage_service, start_time=start_time ) diff --git a/tests/api/admin/controller/test_catalog_services.py b/tests/api/admin/controller/test_catalog_services.py index fda8a836a3..6a6038ad91 100644 --- a/tests/api/admin/controller/test_catalog_services.py +++ b/tests/api/admin/controller/test_catalog_services.py @@ -8,7 +8,6 @@ from api.admin.problem_details import ( CANNOT_CHANGE_PROTOCOL, INTEGRATION_NAME_ALREADY_IN_USE, - MISSING_INTEGRATION, MISSING_SERVICE, MULTIPLE_SERVICES_FOR_LIBRARY, UNKNOWN_PROTOCOL, @@ -21,8 +20,6 @@ create, get_one, ) -from core.model.configuration import ExternalIntegrationLink -from core.s3 import S3UploaderConfiguration from tests.fixtures.api_admin import SettingsControllerFixture @@ -159,63 +156,8 @@ def test_catalog_services_post_errors( goal=ExternalIntegration.CATALOG_GOAL, ) - # Attempt to set an S3 mirror external integration but it does not exist! - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - ME = MARCExporter - flask.request.form = ImmutableMultiDict( - [ - ("name", "exporter name"), - ("id", str(service.id)), - ("protocol", ME.NAME), - ("mirror_integration_id", "1234"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response.uri == MISSING_INTEGRATION.uri - - s3, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - ) - - # Now an S3 integration exists, but it has no MARC bucket configured. 
- with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - ME = MARCExporter - flask.request.form = ImmutableMultiDict( - [ - ("name", "exporter name"), - ("id", str(service.id)), - ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response.uri == MISSING_INTEGRATION.uri - - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) - settings_ctrl_fixture.ctrl.db.session.flush() - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "new name"), - ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), - ] - ) - pytest.raises( - AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services, - ) - # This should be the last test to check since rolling back database # changes in the test can cause it to crash. - s3.setting(S3UploaderConfiguration.MARC_BUCKET_KEY).value = "marc-files" service.libraries += [settings_ctrl_fixture.ctrl.db.default_library()] settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) @@ -225,7 +167,6 @@ def test_catalog_services_post_errors( [ ("name", "new name"), ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), ( "libraries", json.dumps( @@ -250,20 +191,11 @@ def test_catalog_services_post_create( ): ME = MARCExporter - s3, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - ) - s3.setting(S3UploaderConfiguration.MARC_BUCKET_KEY).value = "marc-files" - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "exporter name"), ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), ( "libraries", json.dumps( @@ -289,24 +221,11 @@ def test_catalog_services_post_create( goal=ExternalIntegration.CATALOG_GOAL, ) assert isinstance(service, ExternalIntegration) - # There was one S3 integration and it was selected. The service has an - # External Integration Link to the storage integration that is created - # in a POST with purpose of ExternalIntegrationLink.MARC. 
- integration_link = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegrationLink, - external_integration_id=service.id, - purpose=ExternalIntegrationLink.MARC, - ) - assert isinstance(integration_link, ExternalIntegrationLink) assert service.id == int(response.get_data()) assert ME.NAME == service.protocol assert "exporter name" == service.name assert [settings_ctrl_fixture.ctrl.db.default_library()] == service.libraries - # We expect the Catalog external integration to have a link to the - # S3 storage external integration - assert s3.id == integration_link.other_integration_id assert ( "false" == ConfigurationSetting.for_library_and_externalintegration( @@ -331,14 +250,6 @@ def test_catalog_services_post_edit( ): ME = MARCExporter - s3, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - ) - s3.setting(S3UploaderConfiguration.MARC_BUCKET_KEY).value = "marc-files" - service, ignore = create( settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, @@ -353,7 +264,6 @@ def test_catalog_services_post_edit( ("name", "exporter name"), ("id", str(service.id)), ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), ( "libraries", json.dumps( @@ -373,17 +283,9 @@ def test_catalog_services_post_edit( ) assert response.status_code == 200 - integration_link = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegrationLink, - external_integration_id=service.id, - purpose=ExternalIntegrationLink.MARC, - ) - assert isinstance(integration_link, ExternalIntegrationLink) assert service.id == int(response.get_data()) assert ME.NAME == service.protocol assert "exporter name" == service.name - assert s3.id == integration_link.other_integration_id assert [settings_ctrl_fixture.ctrl.db.default_library()] == service.libraries assert ( "false" diff --git a/tests/api/admin/controller/test_storage_services.py b/tests/api/admin/controller/test_storage_services.py deleted file mode 100644 index 643c266b51..0000000000 --- a/tests/api/admin/controller/test_storage_services.py +++ /dev/null @@ -1,27 +0,0 @@ -from api.admin.controller.storage_services import StorageServicesController -from core.model import ExternalIntegration -from core.s3 import S3Uploader - - -class TestStorageServices: - def test_storage_service_management(self, settings_ctrl_fixture): - class MockStorage(StorageServicesController): - def _get_integration_protocols(self, apis, protocol_name_attr): - self.manage_called_with = (apis, protocol_name_attr) - - def _delete_integration(self, *args): - self.delete_called_with = args - - controller = MockStorage(settings_ctrl_fixture.manager) - EI = ExternalIntegration - with settings_ctrl_fixture.request_context_with_admin("/"): - controller.process_services() - (apis, procotol_name) = controller.manage_called_with - - assert S3Uploader in apis - assert procotol_name == "NAME" - - with settings_ctrl_fixture.request_context_with_admin("/"): - id = object() - controller.process_delete(id) - assert (id, EI.STORAGE_GOAL) == controller.delete_called_with diff --git a/tests/api/admin/test_routes.py b/tests/api/admin/test_routes.py index 465aec0e7f..6dae466615 100644 --- a/tests/api/admin/test_routes.py +++ b/tests/api/admin/test_routes.py @@ -680,29 +680,6 @@ def test_process_search_service_self_tests(self, fixture: AdminRouteFixture): fixture.assert_supported_methods(url, "GET", "POST") -class TestAdminStorageServices: - CONTROLLER_NAME = 
"admin_storage_services_controller" - - @pytest.fixture(scope="function") - def fixture(self, admin_route_fixture: AdminRouteFixture) -> AdminRouteFixture: - admin_route_fixture.set_controller_name(self.CONTROLLER_NAME) - return admin_route_fixture - - def test_process_services(self, fixture: AdminRouteFixture): - url = "/admin/storage_services" - fixture.assert_authenticated_request_calls( - url, fixture.controller.process_services # type: ignore - ) - fixture.assert_supported_methods(url, "GET", "POST") - - def test_process_delete(self, fixture: AdminRouteFixture): - url = "/admin/storage_service/" - fixture.assert_authenticated_request_calls( - url, fixture.controller.process_delete, "", http_method="DELETE" # type: ignore - ) - fixture.assert_supported_methods(url, "DELETE") - - class TestAdminCatalogServices: CONTROLLER_NAME = "admin_catalog_services_controller" diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 20d39e22d0..603e67b2c6 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -35,7 +35,6 @@ "tests.fixtures.opds2_files", "tests.fixtures.opds_files", "tests.fixtures.overdrive", - "tests.fixtures.s3", "tests.fixtures.sample_covers", "tests.fixtures.search", "tests.fixtures.time", diff --git a/tests/api/mockapi/circulation.py b/tests/api/mockapi/circulation.py index f3f3b1cab7..ff1392c83a 100644 --- a/tests/api/mockapi/circulation.py +++ b/tests/api/mockapi/circulation.py @@ -1,6 +1,10 @@ import logging from abc import ABC from collections import defaultdict +from typing import Optional +from unittest.mock import MagicMock + +from sqlalchemy.orm import Session from api.circulation import BaseCirculationAPI, CirculationAPI, HoldInfo, LoanInfo from api.controller import CirculationManager @@ -8,6 +12,7 @@ from core.integration.settings import BaseSettings from core.model import DataSource, Hold, Loan, get_one_or_create from core.model.configuration import ExternalIntegration +from core.service.container import Services from tests.mocks.search import ExternalSearchIndexFake @@ -167,6 +172,11 @@ def api_for_license_pool(self, licensepool): class MockCirculationManager(CirculationManager): d_circulation: MockCirculationAPI + def __init__(self, db: Session, services: Optional[Services] = None): + if services is None: + services = MagicMock(spec=Services) + super().__init__(db, services) + def setup_search(self): """Set up a search client.""" integration, _ = get_one_or_create( diff --git a/tests/api/test_controller_cm.py b/tests/api/test_controller_cm.py index e376c9d5c1..06ff4d81aa 100644 --- a/tests/api/test_controller_cm.py +++ b/tests/api/test_controller_cm.py @@ -154,7 +154,7 @@ class BadSearch(CirculationManager): def setup_search(self): raise Exception("doomed!") - circulation = BadSearch(circulation_fixture.db.session) + circulation = BadSearch(circulation_fixture.db.session, MagicMock()) # We didn't get a search object. assert None == circulation.external_search diff --git a/tests/api/test_controller_opdsfeed.py b/tests/api/test_controller_opdsfeed.py index dd81e7b5c5..22a21471f8 100644 --- a/tests/api/test_controller_opdsfeed.py +++ b/tests/api/test_controller_opdsfeed.py @@ -674,7 +674,7 @@ class BadSearch(CirculationManager): def setup_search(self): raise Exception("doomed!") - circulation = BadSearch(circulation_fixture.db.session) + circulation = BadSearch(circulation_fixture.db.session, MagicMock()) # An attempt to call FeedController.search() will return a # problem detail. 
diff --git a/tests/api/test_google_analytics_provider.py b/tests/api/test_google_analytics_provider.py index ceee85fd6c..26682ceb9e 100644 --- a/tests/api/test_google_analytics_provider.py +++ b/tests/api/test_google_analytics_provider.py @@ -1,5 +1,6 @@ import unicodedata import urllib.parse +from unittest.mock import MagicMock import pytest from psycopg2.extras import NumericRange @@ -37,13 +38,13 @@ def test_init(self, db: DatabaseTransactionFixture): ) with pytest.raises(CannotLoadConfiguration) as excinfo: - GoogleAnalyticsProvider(integration) + GoogleAnalyticsProvider(integration, MagicMock()) assert "Google Analytics can't be configured without a library." in str( excinfo.value ) with pytest.raises(CannotLoadConfiguration) as excinfo: - GoogleAnalyticsProvider(integration, db.default_library()) + GoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) assert ( "Missing tracking id for library %s" % db.default_library().short_name in str(excinfo.value) @@ -55,12 +56,12 @@ def test_init(self, db: DatabaseTransactionFixture): db.default_library(), integration, ).value = "faketrackingid" - ga = GoogleAnalyticsProvider(integration, db.default_library()) + ga = GoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) assert GoogleAnalyticsProvider.DEFAULT_URL == ga.url assert "faketrackingid" == ga.tracking_id integration.url = db.fresh_str() - ga = GoogleAnalyticsProvider(integration, db.default_library()) + ga = GoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) assert integration.url == ga.url assert "faketrackingid" == ga.tracking_id @@ -78,7 +79,7 @@ def test_collect_event_with_work(self, db: DatabaseTransactionFixture): db.default_library(), integration, ).value = "faketrackingid" - ga = MockGoogleAnalyticsProvider(integration, db.default_library()) + ga = MockGoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) work = db.work( title="pi\u00F1ata", @@ -146,7 +147,7 @@ def test_collect_event_without_work(self, db: DatabaseTransactionFixture): db.default_library(), integration, ).value = "faketrackingid" - ga = MockGoogleAnalyticsProvider(integration, db.default_library()) + ga = MockGoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) identifier = db.identifier() source = DataSource.lookup(db.session, DataSource.GUTENBERG) @@ -201,7 +202,7 @@ def test_collect_event_without_license_pool(self, db: DatabaseTransactionFixture db.default_library(), integration, ).value = "faketrackingid" - ga = MockGoogleAnalyticsProvider(integration, db.default_library()) + ga = MockGoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) now = utc_now() ga.collect_event(db.default_library(), None, CirculationEvent.NEW_PATRON, now) diff --git a/tests/api/test_opds2.py b/tests/api/test_opds2.py index 969b290261..5a57845b4f 100644 --- a/tests/api/test_opds2.py +++ b/tests/api/test_opds2.py @@ -1,6 +1,6 @@ import io import json -from unittest.mock import patch +from unittest.mock import MagicMock, patch from urllib.parse import parse_qs, quote, urlparse import pytest @@ -315,7 +315,7 @@ def test_opds2_with_authentication_tokens( work = works[0] identifier = work.presentation_edition.primary_identifier - manager = CirculationManager(controller_fixture.db.session) + manager = CirculationManager(controller_fixture.db.session, MagicMock()) patron = controller_fixture.db.patron() # Borrow the book from the library diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index 5094b5f4d8..a8952f09f4 
100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -4,7 +4,7 @@ import logging from io import StringIO from pathlib import Path -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Optional from unittest.mock import MagicMock, patch import pytest @@ -29,7 +29,6 @@ SessionManager, create, ) -from core.model.configuration import ExternalIntegrationLink from core.opds import AcquisitionFeed from core.util.datetime_helpers import datetime_utc, utc_now from core.util.flask_util import OPDSFeedResponse, Response @@ -543,6 +542,48 @@ def test_do_run( assert work.title in feed.content +class TestCacheMARCFilesFixture: + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + self.lane = db.lane(genres=["Science Fiction"]) + self.integration = db.external_integration( + ExternalIntegration.MARC_EXPORT, ExternalIntegration.CATALOG_GOAL + ) + + self.exporter = MARCExporter(None, None, self.integration) + self.mock_records = MagicMock() + self.mock_services = MagicMock() + self.exporter.records = self.mock_records + + def script(self, cmd_args: Optional[list[str]] = None) -> CacheMARCFiles: + cmd_args = cmd_args or [] + return CacheMARCFiles( + self.db.session, services=self.mock_services, cmd_args=cmd_args + ) + + def assert_call(self, call: Any) -> None: + assert call.args[0] == self.lane + assert isinstance(call.args[1], MARCLibraryAnnotator) + assert call.args[2] == self.mock_services.storage.public.return_value + + def create_cached_file(self, end_time: datetime.datetime) -> CachedMARCFile: + representation, _ = self.db.representation() + cached, _ = create( + self.db.session, + CachedMARCFile, + library=self.db.default_library(), + lane=self.lane, + representation=representation, + end_time=end_time, + ) + return cached + + +@pytest.fixture +def cache_marc_files(db: DatabaseTransactionFixture) -> TestCacheMARCFilesFixture: + return TestCacheMARCFilesFixture(db) + + class TestCacheMARCFiles: def test_should_process_library(self, lane_script_fixture: LaneScriptFixture): db = lane_script_fixture.db @@ -582,125 +623,133 @@ def test_should_process_lane(self, lane_script_fixture: LaneScriptFixture): assert True == script.should_process_lane(wl) assert False == script.should_process_lane(empty) - def test_process_lane(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - lane = db.lane(genres=["Science Fiction"]) - integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, ExternalIntegration.CATALOG_GOAL - ) - - class MockMARCExporter(MARCExporter): - called_with = [] - - def records(self, lane, annotator, mirror_integration, start_time=None): - self.called_with += [(lane, annotator, mirror_integration, start_time)] - - exporter = MockMARCExporter(None, None, integration) - - # This just needs to be an ExternalIntegration, but a storage integration - # makes the most sense in this context. 
- the_linked_integration, ignore = create( - db.session, - ExternalIntegration, - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - ) - - integration_link = db.external_integration_link( - integration=integration, - other_integration=the_linked_integration, - purpose=ExternalIntegrationLink.MARC, - ) - - script = CacheMARCFiles(db.session, cmd_args=[]) - script.process_lane(lane, exporter) + def test_process_lane_never_run(self, cache_marc_files: TestCacheMARCFilesFixture): + script = cache_marc_files.script() + script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) # If the script has never been run before, it runs the exporter once # to create a file with all records. - assert 1 == len(exporter.called_with) - - assert lane == exporter.called_with[0][0] - assert isinstance(exporter.called_with[0][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[0][2] - assert None == exporter.called_with[0][3] + assert cache_marc_files.mock_records.call_count == 1 + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args) + def test_process_lane_cached_update( + self, cache_marc_files: TestCacheMARCFilesFixture + ): # If we have a cached file already, and it's old enough, the script will # run the exporter twice, first to update that file and second to create # a file with changes since that first file was originally created. - exporter.called_with = [] + db = cache_marc_files.db now = utc_now() - yesterday = now - datetime.timedelta(days=1) last_week = now - datetime.timedelta(days=7) + cache_marc_files.create_cached_file(last_week) ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.UPDATE_FREQUENCY, db.default_library(), integration - ).value = 3 - representation, ignore = db.representation() - cached, ignore = create( db.session, - CachedMARCFile, - library=db.default_library(), - lane=lane, - representation=representation, - end_time=last_week, - ) - - script.process_lane(lane, exporter) + MARCExporter.UPDATE_FREQUENCY, + db.default_library(), + cache_marc_files.integration, + ).value = 3 - assert 2 == len(exporter.called_with) + script = cache_marc_files.script() + script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) + assert cache_marc_files.mock_records.call_count == 2 - assert lane == exporter.called_with[0][0] - assert isinstance(exporter.called_with[0][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[0][2] - assert None == exporter.called_with[0][3] + # First call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) - assert lane == exporter.called_with[1][0] - assert isinstance(exporter.called_with[1][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[1][2] - assert exporter.called_with[1][3] < last_week + # Second call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) + assert ( + cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] + < last_week + ) + def test_process_lane_cached_recent( + self, cache_marc_files: TestCacheMARCFilesFixture + ): # If we already have a recent cached file, the script won't do anything. 
- cached.end_time = yesterday
- exporter.called_with = []
- script.process_lane(lane, exporter)
- assert [] == exporter.called_with
+ db = cache_marc_files.db
+ now = utc_now()
+ yesterday = now - datetime.timedelta(days=1)
+ cache_marc_files.create_cached_file(yesterday)
+ ConfigurationSetting.for_library_and_externalintegration(
+ db.session,
+ MARCExporter.UPDATE_FREQUENCY,
+ db.default_library(),
+ cache_marc_files.integration,
+ ).value = 3
+ script = cache_marc_files.script()
+ script.process_lane(cache_marc_files.lane, cache_marc_files.exporter)
+ assert cache_marc_files.mock_records.call_count == 0
+
+ def test_process_lane_cached_recent_force(
+ self, cache_marc_files: TestCacheMARCFilesFixture
+ ):
 # But we can force it to run anyway.
- script = CacheMARCFiles(db.session, cmd_args=["--force"])
- script.process_lane(lane, exporter)
+ db = cache_marc_files.db
+ now = utc_now()
+ yesterday = now - datetime.timedelta(days=1)
+ last_week = now - datetime.timedelta(days=7)
+ cache_marc_files.create_cached_file(yesterday)
+ ConfigurationSetting.for_library_and_externalintegration(
+ db.session,
+ MARCExporter.UPDATE_FREQUENCY,
+ db.default_library(),
+ cache_marc_files.integration,
+ ).value = 3
- assert 2 == len(exporter.called_with)
+ script = cache_marc_files.script(cmd_args=["--force"])
+ script.process_lane(cache_marc_files.lane, cache_marc_files.exporter)
+ assert cache_marc_files.mock_records.call_count == 2
- assert lane == exporter.called_with[0][0]
- assert isinstance(exporter.called_with[0][1], MARCLibraryAnnotator)
- assert the_linked_integration == exporter.called_with[0][2]
- assert None == exporter.called_with[0][3]
+ # First call
+ cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0])
- assert lane == exporter.called_with[1][0]
- assert isinstance(exporter.called_with[1][1], MARCLibraryAnnotator)
- assert the_linked_integration == exporter.called_with[1][2]
- assert exporter.called_with[1][3] < yesterday
- assert exporter.called_with[1][3] > last_week
+ # Second call
+ cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1])
+ assert (
+ cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"]
+ < yesterday
+ )
+ assert (
+ cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"]
+ > last_week
+ )
+ def test_process_lane_cached_frequency_zero(
+ self, cache_marc_files: TestCacheMARCFilesFixture
+ ):
 # The update frequency can also be 0, in which case it will always run.
+ db = cache_marc_files.db + now = utc_now() + yesterday = now - datetime.timedelta(days=1) + last_week = now - datetime.timedelta(days=7) + cache_marc_files.create_cached_file(yesterday) ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.UPDATE_FREQUENCY, db.default_library(), integration + db.session, + MARCExporter.UPDATE_FREQUENCY, + db.default_library(), + cache_marc_files.integration, ).value = 0 - exporter.called_with = [] - script = CacheMARCFiles(db.session, cmd_args=[]) - script.process_lane(lane, exporter) + script = cache_marc_files.script() + script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) - assert 2 == len(exporter.called_with) + assert cache_marc_files.mock_records.call_count == 2 - assert lane == exporter.called_with[0][0] - assert isinstance(exporter.called_with[0][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[0][2] - assert None == exporter.called_with[0][3] + # First call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) - assert lane == exporter.called_with[1][0] - assert isinstance(exporter.called_with[1][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[1][2] - assert exporter.called_with[1][3] < yesterday - assert exporter.called_with[1][3] > last_week + # Second call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) + assert ( + cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] + < yesterday + ) + assert ( + cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] + > last_week + ) class TestInstanceInitializationScript: diff --git a/tests/core/conftest.py b/tests/core/conftest.py index 4f494afddc..9ea0933198 100644 --- a/tests/core/conftest.py +++ b/tests/core/conftest.py @@ -9,6 +9,7 @@ "tests.fixtures.s3", "tests.fixtures.sample_covers", "tests.fixtures.search", + "tests.fixtures.services", "tests.fixtures.time", "tests.fixtures.tls_server", ] diff --git a/tests/core/models/test_collection.py b/tests/core/models/test_collection.py index b85247f201..a42b7150ed 100644 --- a/tests/core/models/test_collection.py +++ b/tests/core/models/test_collection.py @@ -7,11 +7,7 @@ from core.model import create, get_one_or_create from core.model.circulationevent import CirculationEvent from core.model.collection import Collection -from core.model.configuration import ( - ConfigurationSetting, - ExternalIntegration, - ExternalIntegrationLink, -) +from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.coverage import CoverageRecord, WorkCoverageRecord from core.model.customlist import CustomList from core.model.datasource import DataSource @@ -934,28 +930,6 @@ def test_delete(self, example_collection_fixture: ExampleCollectionFixture): ) setting2.value = "value2" - # Also it has links to another independent ExternalIntegration (S3 storage in this case). - s3_storage = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - libraries=[db.default_library()], - ) - link1 = db.external_integration_link( - integration, - db.default_library(), - s3_storage, - ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS, - ) - link2 = db.external_integration_link( - integration, - db.default_library(), - s3_storage, - ExternalIntegrationLink.COVERS, - ) - - integration.links.append(link1) - integration.links.append(link2) - # It's got a Work that has a LicensePool, which has a License, # which has a loan. 
work = db.work(with_license_pool=True) @@ -1047,20 +1021,15 @@ def remove_work(self, work): # has any LicensePools), but not the second. assert [work] == index.removed - # The collection ExternalIntegration, its settings, and links to other integrations have been deleted. + # The collection ExternalIntegration and its settings have been deleted. # The storage ExternalIntegration remains. external_integrations = db.session.query(ExternalIntegration).all() assert integration not in external_integrations - assert s3_storage in external_integrations settings = db.session.query(ConfigurationSetting).all() for setting in (setting1, setting2): assert setting not in settings - links = db.session.query(ExternalIntegrationLink).all() - for link in (link1, link2): - assert link not in links - # If no search_index is passed into delete() (the default behavior), # we try to instantiate the normal ExternalSearchIndex object. Since # no search index is configured, this will raise an exception -- but diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py index f21c2b65b5..e7f7fe8ee6 100644 --- a/tests/core/models/test_configuration.py +++ b/tests/core/models/test_configuration.py @@ -421,12 +421,12 @@ def test_relationships(self, db: DatabaseTransactionFixture): storage1 = db.external_integration( name="integration1", - protocol=ExternalIntegration.S3, + protocol="protocol", ) storage2 = db.external_integration( name="integration2", - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, + protocol="protocol", + goal="storage", username="username", password="password", ) @@ -721,17 +721,17 @@ def test_delete( db = example_externalintegration_fixture.database_fixture integration1 = db.external_integration( - ExternalIntegration.LCP, + "protocol", ExternalIntegration.LICENSE_GOAL, libraries=[db.default_library()], ) integration2 = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, + "storage", + "storage goal", libraries=[db.default_library()], ) - # Set up a a link associating integration2 with integration1. + # Set up a link associating integration2 with integration1. link1 = db.external_integration_link( integration1, db.default_library(), diff --git a/tests/core/service/__init__.py b/tests/core/service/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/service/storage/__init__.py b/tests/core/service/storage/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/service/storage/test_configuration.py b/tests/core/service/storage/test_configuration.py new file mode 100644 index 0000000000..2621bff70e --- /dev/null +++ b/tests/core/service/storage/test_configuration.py @@ -0,0 +1,47 @@ +import pytest + +from core.config import CannotLoadConfiguration +from core.service.storage.configuration import StorageConfiguration + + +def test_region_validation_fail(): + with pytest.raises(CannotLoadConfiguration) as exc_info: + StorageConfiguration(region="foo bar baz") + + assert "PALACE_STORAGE_REGION: Invalid region: foo bar baz." 
in str(exc_info.value) + + +def test_region_validation_success(): + configuration = StorageConfiguration(region="us-west-2") + assert configuration.region == "us-west-2" + + configuration = StorageConfiguration(region=None) + assert configuration.region is None + + +@pytest.mark.parametrize( + "url", + [ + "http://localhost:9000", + "https://real.endpoint.com", + "http://192.168.0.1", + ], +) +def test_endpoint_url_validation_success(url: str): + configuration = StorageConfiguration(endpoint_url=url) + assert configuration.endpoint_url == url + + +@pytest.mark.parametrize( + "url, error", + [ + ("ftp://localhost:9000", "URL scheme not permitted"), + ("foo bar baz", "invalid or missing URL scheme"), + ], +) +def test_endpoint_url_validation_fail(url: str, error: str): + with pytest.raises(CannotLoadConfiguration) as exc_info: + StorageConfiguration(endpoint_url=url) + + assert "PALACE_STORAGE_ENDPOINT_URL" in str(exc_info.value) + assert error in str(exc_info.value) diff --git a/tests/core/service/storage/test_s3.py b/tests/core/service/storage/test_s3.py new file mode 100644 index 0000000000..7587de7a9e --- /dev/null +++ b/tests/core/service/storage/test_s3.py @@ -0,0 +1,416 @@ +from __future__ import annotations + +import functools +from io import BytesIO +from typing import TYPE_CHECKING, Generator, Optional +from unittest.mock import MagicMock + +import pytest +from botocore.exceptions import BotoCoreError, ClientError +from mypy_boto3_s3 import S3Client +from pydantic import AnyHttpUrl + +from core.config import CannotLoadConfiguration +from core.service.configuration import ServiceConfiguration +from core.service.storage.container import Storage +from core.service.storage.s3 import S3Service + +if TYPE_CHECKING: + from tests.fixtures.s3 import S3ServiceFixture + + +class TestS3Service: + def test_factory(self, s3_service_fixture: S3ServiceFixture): + """The S3Service.factory method returns an S3Service, if given + a bucket, or None otherwise. + """ + # No bucket, no service. + factory = functools.partial( + S3Service.factory, + client=s3_service_fixture.mock_s3_client, + region=s3_service_fixture.region, + url_template=s3_service_fixture.url_template, + ) + assert factory(bucket=None) is None + + # Bucket, service. + service = factory(bucket="bucket") + assert isinstance(service, S3Service) + assert service.client == s3_service_fixture.mock_s3_client + assert service.region == s3_service_fixture.region + assert service.bucket == "bucket" + assert service.url_template == s3_service_fixture.url_template + + @pytest.mark.parametrize( + "url_template", + [ + # No region passed into the constructor, but the URL template + # contains a region. + "https://{bucket}.s3.{region}.amazonaws.com/{key}", + # No key in the URL template. 
+ "https://no-key-in-template.com/", + ], + ) + def test_constructor_exception( + self, url_template: str, s3_service_fixture: S3ServiceFixture + ): + """The S3Service constructor raises an exception if the URL template is invalid.""" + with pytest.raises(CannotLoadConfiguration): + s3_service_fixture.service(url_template=url_template, region=None) + + @pytest.mark.parametrize( + "template,key,expected", + [ + ( + "https://{bucket}.s3.{region}.amazonaws.com/{key}", + "key", + "https://bucket.s3.region.amazonaws.com/key", + ), + ( + "https://test.com/{bucket}/{key}", + "key with spaces", + "https://test.com/bucket/key%20with%20spaces", + ), + ( + "https://test.com/{bucket}/{key}", + "s p a c e s/🔥/slashes%", + "https://test.com/bucket/s%20p%20a%20c%20e%20s/%F0%9F%94%A5/slashes%25", + ), + ( + "https://cdn.com/{key}", + "filename.ext", + "https://cdn.com/filename.ext", + ), + ], + ) + def test_generate_url( + self, + template: str, + key: str, + expected: str, + s3_service_fixture: S3ServiceFixture, + ): + """ + Generate URL creates a URL based on the URL template, it uses format to template in + the region, bucket, and key, then makes sure the URL is urlencoded. + """ + service = s3_service_fixture.service(url_template=template) + url = service.generate_url(key) + assert url == expected + + @pytest.mark.parametrize( + "content", + ["foo bar baz", b"byte string"], + ) + def test_store(self, content: bytes | str, s3_service_fixture: S3ServiceFixture): + service = s3_service_fixture.service() + service.store_stream = MagicMock() + + if isinstance(content, str): + expected_content = content.encode("utf8") + else: + expected_content = content + + service.store("key", content, "text/plain") + service.store_stream.assert_called_once() + assert service.store_stream.call_args.kwargs["key"] == "key" + stream = service.store_stream.call_args.kwargs["stream"] + assert isinstance(stream, BytesIO) + assert stream.getvalue() == expected_content + assert service.store_stream.call_args.kwargs["content_type"] == "text/plain" + + @pytest.mark.parametrize( + "content_type", + ["text/plain", "application/binary", None], + ) + def test_store_stream( + self, content_type: str, s3_service_fixture: S3ServiceFixture + ): + service = s3_service_fixture.service() + stream = MagicMock(spec=BytesIO) + + if content_type: + url = service.store_stream("key", stream, content_type) + else: + url = service.store_stream("key", stream) + + mock_s3_client = s3_service_fixture.mock_s3_client + mock_s3_client.upload_fileobj.assert_called_once() + assert mock_s3_client.upload_fileobj.call_args.kwargs["Fileobj"] == stream + assert ( + mock_s3_client.upload_fileobj.call_args.kwargs["Bucket"] + == s3_service_fixture.bucket + ) + assert mock_s3_client.upload_fileobj.call_args.kwargs["Key"] == "key" + assert url == "https://region.test.com/bucket/key" + stream.close.assert_called_once() + + if content_type: + assert mock_s3_client.upload_fileobj.call_args.kwargs["ExtraArgs"] == { + "ContentType": content_type + } + else: + assert mock_s3_client.upload_fileobj.call_args.kwargs["ExtraArgs"] == {} + + @pytest.mark.parametrize( + "exception", + [BotoCoreError(), ClientError({}, "")], + ) + def test_store_stream_exception( + self, exception: Exception, s3_service_fixture: S3ServiceFixture + ): + service = s3_service_fixture.service() + stream = MagicMock(spec=BytesIO) + + mock_s3_client = s3_service_fixture.mock_s3_client + mock_s3_client.upload_fileobj.side_effect = exception + assert service.store_stream("key", stream) is None + 
mock_s3_client.upload_fileobj.assert_called_once() + stream.close.assert_called_once() + + def test_multipart_upload(self, s3_service_fixture: S3ServiceFixture): + service = s3_service_fixture.service() + + # Successful upload + with service.multipart(key="key") as upload: + assert upload.client == s3_service_fixture.mock_s3_client + assert upload.bucket == s3_service_fixture.bucket + assert upload.key == "key" + assert upload.parts == [] + + s3_service_fixture.mock_s3_client.create_multipart_upload.assert_called_once() + assert upload.complete is False + assert upload.url == "https://region.test.com/bucket/key" + assert upload.exception is None + + upload.upload_part(b"Part 1") + assert s3_service_fixture.mock_s3_client.upload_part.call_count == 1 + upload.upload_part(b"Part 2") + assert s3_service_fixture.mock_s3_client.upload_part.call_count == 2 + + assert len(upload.parts) == 2 + [part1, part2] = upload.parts + assert part1.PartNumber == 1 + assert part2.PartNumber == 2 + + s3_service_fixture.mock_s3_client.complete_multipart_upload.assert_not_called() + + assert upload.complete is True + assert upload.exception is None + s3_service_fixture.mock_s3_client.complete_multipart_upload.assert_called_once() + + def test_multipart_upload_boto_exception( + self, s3_service_fixture: S3ServiceFixture + ): + service = s3_service_fixture.service() + exception = BotoCoreError() + s3_service_fixture.mock_s3_client.upload_part.side_effect = exception + + # A boto exception is raised during upload, but it is captured + # and the upload is aborted. + with service.multipart(key="key") as upload: + assert upload.complete is False + assert upload.url == "https://region.test.com/bucket/key" + assert upload.exception is None + upload.upload_part(b"test") + + assert upload.complete is False + assert upload.exception is exception + s3_service_fixture.mock_s3_client.abort_multipart_upload.assert_called_once() + + def test_multipart_upload_other_exception( + self, s3_service_fixture: S3ServiceFixture + ): + service = s3_service_fixture.service() + exception = ValueError("foo") + s3_service_fixture.mock_s3_client.upload_part.side_effect = exception + + # A non-boto exception is raised during upload, the upload is aborted + # and the exception is raised. + with pytest.raises(ValueError) as excinfo: + with service.multipart(key="key") as upload: + assert upload.complete is False + assert upload.url == "https://region.test.com/bucket/key" + assert upload.exception is None + upload.upload_part(b"test") + + assert upload.complete is False + assert upload.exception is exception + s3_service_fixture.mock_s3_client.abort_multipart_upload.assert_called_once() + assert excinfo.value is exception + + # Calling upload_part after the upload is complete raises an error. 
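+ # (Exiting the `with` block above finalized the upload, marking it complete.)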
+ with pytest.raises(RuntimeError): + upload.upload_part(b"foo") + + +class S3UploaderIntegrationConfiguration(ServiceConfiguration): + endpoint_url: AnyHttpUrl + user: str + password: str + + class Config(ServiceConfiguration.Config): + env_prefix = "PALACE_TEST_MINIO_" + + +class S3ServiceIntegrationFixture: + def __init__(self): + self.container = Storage() + self.configuration = S3UploaderIntegrationConfiguration() + self.container.config.from_dict( + { + "access_key": self.configuration.user, + "secret_key": self.configuration.password, + "endpoint_url": self.configuration.endpoint_url, + "region": "us-east-1", + "analytics_bucket": "analytics", + "public_access_bucket": "public", + "url_template": self.configuration.endpoint_url + "/{bucket}/{key}", + } + ) + self.buckets = [] + self.create_bucket("analytics") + self.create_bucket("public") + + @property + def s3_client(self) -> S3Client: + return self.container.s3_client() + + @property + def public(self) -> S3Service: + return self.container.public() + + @property + def analytics(self) -> S3Service: + return self.container.analytics() + + def create_bucket(self, bucket_name: str) -> None: + client = self.s3_client + client.create_bucket(Bucket=bucket_name) + self.buckets.append(bucket_name) + + def close(self): + for bucket in self.buckets: + response = self.s3_client.list_objects(Bucket=bucket) + + for object in response.get("Contents", []): + object_key = object["Key"] + self.s3_client.delete_object(Bucket=bucket, Key=object_key) + + self.s3_client.delete_bucket(Bucket=bucket) + + +@pytest.fixture +def s3_service_integration_fixture() -> Generator[ + S3ServiceIntegrationFixture, None, None +]: + fixture = S3ServiceIntegrationFixture() + yield fixture + fixture.close() + + +@pytest.mark.minio +class TestS3ServiceIntegration: + @pytest.mark.parametrize( + "key, bucket, content, content_type", + [ + ("key", "public", "foo bar baz", "text/plain"), + ("key/w i t h/slash/.!%:", "public", b"byte string", None), + ("key/with/🥏", "public", "🔥", None), + ("ûberkey", "analytics", "foo bar", "application/pdf"), + ("õ/🤖/analytics.foo", "analytics", b"another byte string", None), + ("normal/key", "analytics", "🚀", None), + ], + ) + def test_store( + self, + key: str, + bucket: str, + content: bytes | str, + content_type: Optional[str], + s3_service_integration_fixture: S3ServiceIntegrationFixture, + ): + """The S3Service.store method stores content in the bucket.""" + service = getattr(s3_service_integration_fixture, bucket) + service.store(key, content, content_type) + response = s3_service_integration_fixture.s3_client.get_object( + Bucket=bucket, Key=key + ) + + if isinstance(content, str): + # The response we get back from S3 is always utf-8 encoded bytes. 
+ expected_content = content.encode("utf8") + else: + expected_content = content + + assert response["Body"].read() == expected_content + + if content_type is None: + expected_content_type = "binary/octet-stream" + else: + expected_content_type = content_type + assert response["ContentType"] == expected_content_type + + @pytest.mark.parametrize( + "key, bucket, content, content_type", + [ + ("key", "public", b"foo bar baz", "text/plain"), + ("key/with/slash", "public", b"byte string", None), + ("key/with/🥏", "public", "🔥".encode(), None), + ("ûberkey", "analytics", b"foo bar", "application/pdf"), + ("õ/🤖/analytics.foo", "analytics", b"another byte string", None), + ("normal/key", "analytics", "🚀".encode(), None), + ], + ) + def test_multipart( + self, + key: str, + bucket: str, + content: bytes, + content_type: Optional[str], + s3_service_integration_fixture: S3ServiceIntegrationFixture, + ): + service = getattr(s3_service_integration_fixture, bucket) + part_1_data = ( + b"a" * 5 * 1024**2 + ) # Minimum part size is 5MB, so we generate some junk data to send. + part_2_data = b"b" * 5 * 1024**2 + with service.multipart(key=key, content_type=content_type) as upload: + upload.upload_part(part_1_data) + upload.upload_part(part_2_data) + upload.upload_part(content) + assert not upload.complete + assert upload.exception is None + + assert upload.complete + assert upload.exception is None + + response = s3_service_integration_fixture.s3_client.get_object( + Bucket=bucket, Key=key + ) + assert response["Body"].read() == part_1_data + part_2_data + content + + if content_type is None: + expected_content_type = "binary/octet-stream" + else: + expected_content_type = content_type + assert response["ContentType"] == expected_content_type + + def test_multipart_one_small_part( + self, + s3_service_integration_fixture: S3ServiceIntegrationFixture, + ): + # If we only have one part, we are allowed to upload less than 5MB. 
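+        # (S3 enforces the 5MB minimum only on the non-final parts of a
+        # multipart upload, so a single small part is accepted.)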
+ service = s3_service_integration_fixture.public + with service.multipart(key="key") as upload: + upload.upload_part(b"small data") + assert not upload.complete + assert upload.exception is None + + assert upload.complete + assert upload.exception is None + + response = s3_service_integration_fixture.s3_client.get_object( + Bucket="public", Key="key" + ) + assert response["Body"].read() == b"small data" diff --git a/tests/core/service/test_configuration.py b/tests/core/service/test_configuration.py new file mode 100644 index 0000000000..48dd008891 --- /dev/null +++ b/tests/core/service/test_configuration.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest +from pyfakefs.fake_filesystem import FakeFilesystem + +from core.config import CannotLoadConfiguration +from core.service.configuration import ServiceConfiguration + +if TYPE_CHECKING: + from _pytest.monkeypatch import MonkeyPatch + + +class MockServiceConfiguration(ServiceConfiguration): + string_with_default: str = "default" + string_without_default: str + int_type: int = 12 + + class Config: + env_prefix = "MOCK_" + + +class ServiceConfigurationFixture: + def __init__(self, type: str, monkeypatch: MonkeyPatch, fs: FakeFilesystem): + self.type = type + self.monkeypatch = monkeypatch + self.fs = fs + + # Make sure the environment is empty + self.monkeypatch.delenv("MOCK_STRING_WITHOUT_DEFAULT", raising=False) + self.monkeypatch.delenv("MOCK_INT_TYPE", raising=False) + self.monkeypatch.delenv("MOCK_STRING_WITH_DEFAULT", raising=False) + + # Make sure the .env file is empty + project_root = Path(__file__).parent.parent.parent.parent.absolute() + self.env_file = fs.create_file(project_root / ".env", contents="") + + def set(self, key: str, value: str): + if self.type == "env": + self.set_env(key, value) + elif self.type == "dot_env": + self.set_dot_env(key, value) + else: + raise ValueError(f"Unknown type: {self.type}") + + def set_env(self, key: str, value: str): + self.monkeypatch.setenv(key, value) + + def set_dot_env(self, key: str, value: str): + existing = self.env_file.contents or "" + self.env_file.set_contents("\n".join([existing, f"{key}={value}"])) + + +@pytest.fixture(params=["env", "dot_env"]) +def service_configuration_fixture( + request: pytest.FixtureRequest, monkeypatch: MonkeyPatch, fs: FakeFilesystem +): + if request.param not in ["env", "dot_env"]: + raise ValueError(f"Unknown param: {request.param}") + + return ServiceConfigurationFixture(request.param, monkeypatch, fs) + + +class TestServiceConfiguration: + def test_set(self, service_configuration_fixture: ServiceConfigurationFixture): + service_configuration_fixture.set("MOCK_STRING_WITHOUT_DEFAULT", "string") + service_configuration_fixture.set("MOCK_INT_TYPE", "42") + + config = MockServiceConfiguration() + + assert config.string_with_default == "default" + assert config.string_without_default == "string" + assert config.int_type == 42 + + def test_override_default( + self, service_configuration_fixture: ServiceConfigurationFixture + ): + service_configuration_fixture.set("MOCK_STRING_WITHOUT_DEFAULT", "string") + service_configuration_fixture.set("MOCK_INT_TYPE", "42") + # Note the spaces around the value, these should be stripped + service_configuration_fixture.set("MOCK_STRING_WITH_DEFAULT", " not default ") + + config = MockServiceConfiguration() + + assert config.string_with_default == "not default" + assert config.string_without_default == "string" + assert config.int_type == 
42 + + def test_encoding(self, service_configuration_fixture: ServiceConfigurationFixture): + service_configuration_fixture.set("MOCK_STRING_WITHOUT_DEFAULT", "🎉") + config = MockServiceConfiguration() + assert config.string_without_default == "🎉" + + def test_exception_missing( + self, service_configuration_fixture: ServiceConfigurationFixture + ): + with pytest.raises(CannotLoadConfiguration) as exc_info: + MockServiceConfiguration() + + assert "MOCK_STRING_WITHOUT_DEFAULT: field required" in str(exc_info.value) + + def test_exception_validation( + self, service_configuration_fixture: ServiceConfigurationFixture + ): + service_configuration_fixture.set("MOCK_INT_TYPE", "this is not an int") + + with pytest.raises(CannotLoadConfiguration) as exc_info: + MockServiceConfiguration() + + assert "MOCK_INT_TYPE: value is not a valid integer" in str(exc_info.value) + + def test_exception_mutation( + self, service_configuration_fixture: ServiceConfigurationFixture + ): + service_configuration_fixture.set("MOCK_STRING_WITHOUT_DEFAULT", "string") + config = MockServiceConfiguration() + + with pytest.raises(TypeError): + # Ignore the type error, since it tells us this is immutable, + # and we are testing that behavior at runtime. + config.string_with_default = "new value" # type: ignore[misc] diff --git a/tests/core/test_local_analytics_provider.py b/tests/core/test_local_analytics_provider.py index 031b9a90d0..478e4d6d68 100644 --- a/tests/core/test_local_analytics_provider.py +++ b/tests/core/test_local_analytics_provider.py @@ -1,9 +1,16 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + import pytest from core.local_analytics_provider import LocalAnalyticsProvider from core.model import CirculationEvent, ExternalIntegration, create, get_one from core.util.datetime_helpers import utc_now -from tests.fixtures.database import DatabaseTransactionFixture + +if TYPE_CHECKING: + from tests.fixtures.database import DatabaseTransactionFixture + from tests.fixtures.services import MockServicesFixture class TestInitializeLocalAnalyticsProvider: @@ -49,7 +56,11 @@ class LocalAnalyticsProviderFixture: integration: ExternalIntegration la: LocalAnalyticsProvider - def __init__(self, transaction: DatabaseTransactionFixture): + def __init__( + self, + transaction: DatabaseTransactionFixture, + mock_services_fixture: MockServicesFixture, + ): self.transaction = transaction self.integration, ignore = create( transaction.session, @@ -57,16 +68,17 @@ def __init__(self, transaction: DatabaseTransactionFixture): goal=ExternalIntegration.ANALYTICS_GOAL, protocol="core.local_analytics_provider", ) + self.services = mock_services_fixture.services self.la = LocalAnalyticsProvider( - self.integration, transaction.default_library() + self.integration, self.services, transaction.default_library() ) @pytest.fixture() def local_analytics_provider_fixture( - db, + db: DatabaseTransactionFixture, mock_services_fixture: MockServicesFixture ) -> LocalAnalyticsProviderFixture: - return LocalAnalyticsProviderFixture(db) + return LocalAnalyticsProviderFixture(db, mock_services_fixture) class TestLocalAnalyticsProvider: @@ -123,7 +135,7 @@ def test_collect_event( # It's possible to instantiate the LocalAnalyticsProvider # without a library. - la = LocalAnalyticsProvider(data.integration) + la = LocalAnalyticsProvider(data.integration, data.services) # In that case, it will process events for any library. 
for library in [database.default_library(), library2]: @@ -184,7 +196,7 @@ def test_neighborhood_is_location( data.integration.setting( p.LOCATION_SOURCE ).value = p.LOCATION_SOURCE_NEIGHBORHOOD - la = p(data.integration, database.default_library()) + la = p(data.integration, data.services, database.default_library()) event, is_new = la.collect_event( database.default_library(), diff --git a/tests/core/test_marc.py b/tests/core/test_marc.py index 3d7debba0f..27355241e1 100644 --- a/tests/core/test_marc.py +++ b/tests/core/test_marc.py @@ -1,4 +1,7 @@ +from __future__ import annotations + import datetime +from typing import TYPE_CHECKING from urllib.parse import quote import pytest @@ -24,12 +27,14 @@ Work, get_one, ) -from core.s3 import MockS3Uploader from core.util.datetime_helpers import datetime_utc, utc_now -from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.search import ExternalSearchFixtureFake from tests.mocks.search import ExternalSearchIndexFake +if TYPE_CHECKING: + from tests.fixtures.database import DatabaseTransactionFixture + from tests.fixtures.s3 import S3ServiceFixture + from tests.fixtures.search import ExternalSearchFixtureFake + class TestAnnotator: def test_annotate_work_record(self, db: DatabaseTransactionFixture): @@ -467,7 +472,20 @@ def test_add_ebooks_subject(self): self._check_field(record, "655", {"a": "Electronic books."}, [" ", "0"]) -class TestMARCExporter: +class MarcExporterFixture: + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + + self.integration = self._integration(db) + self.now = utc_now() + self.exporter = MARCExporter.from_config(db.default_library()) + self.annotator = Annotator() + self.w1 = db.work(genre="Mystery", with_open_access_download=True) + self.w2 = db.work(genre="Mystery", with_open_access_download=True) + + self.search_engine = ExternalSearchIndexFake(db.session) + self.search_engine.mock_query_works([self.w1, self.w2]) + @staticmethod def _integration(db: DatabaseTransactionFixture): return db.external_integration( @@ -476,12 +494,23 @@ def _integration(db: DatabaseTransactionFixture): libraries=[db.default_library()], ) + +@pytest.fixture +def marc_exporter_fixture( + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, +) -> MarcExporterFixture: + # external_search_fake_fixture is used only for the integration it creates + return MarcExporterFixture(db) + + +class TestMARCExporter: def test_from_config(self, db: DatabaseTransactionFixture): pytest.raises( CannotLoadConfiguration, MARCExporter.from_config, db.default_library() ) - integration = self._integration(db) + integration = MarcExporterFixture._integration(db) exporter = MARCExporter.from_config(db.default_library()) assert integration == exporter.integration assert db.default_library() == exporter.library @@ -544,7 +573,7 @@ def test_create_record(self, db: DatabaseTransactionFixture): assert "author, new" in cached # If we pass in an integration, it's passed along to the annotator. 
- integration = self._integration(db) + integration = MarcExporterFixture._integration(db) class MockAnnotator(Annotator): integration = None @@ -586,218 +615,141 @@ def test_create_record_roundtrip(self, db: DatabaseTransactionFixture): new_record = MARCExporter.create_record(new_work, annotator) assert record.as_marc() == new_record.as_marc() - def test_records( + @pytest.mark.parametrize("object_type", ["lane", "worklist"]) + def test_records_lane( self, + object_type: str, db: DatabaseTransactionFixture, - external_search_fake_fixture: ExternalSearchFixtureFake, + s3_service_fixture: S3ServiceFixture, + marc_exporter_fixture: MarcExporterFixture, ): - # external_search_fake_fixture is used only for the integration it creates - integration = self._integration(db) - now = utc_now() - exporter = MARCExporter.from_config(db.default_library()) - annotator = Annotator() - lane = db.lane("Test Lane", genres=["Mystery"]) - w1 = db.work(genre="Mystery", with_open_access_download=True) - w2 = db.work(genre="Mystery", with_open_access_download=True) - - search_engine = ExternalSearchIndexFake(db.session) - search_engine.mock_query_works([w1, w2]) + if object_type == "lane": + lane_or_wl = db.lane("Test Lane", genres=["Mystery"]) + elif object_type == "worklist": + lane_or_wl = WorkList() + lane_or_wl.initialize(db.default_library(), display_name="All Books") + else: + raise RuntimeError() + exporter = marc_exporter_fixture.exporter + annotator = marc_exporter_fixture.annotator + search_engine = marc_exporter_fixture.search_engine # If there's a storage protocol but not corresponding storage integration, # it raises an exception. - pytest.raises(Exception, exporter.records, lane, annotator) - - # If there is a storage integration, the output file is mirrored. - mirror_integration = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - username="username", - password="password", - ) - - mirror = MockS3Uploader() + pytest.raises(Exception, exporter.records, lane_or_wl, annotator) + storage_service = s3_service_fixture.mock_service() exporter.records( - lane, + lane_or_wl, annotator, - mirror_integration, - mirror=mirror, + storage_service, query_batch_size=1, upload_batch_size=1, search_engine=search_engine, ) # The file was mirrored and a CachedMARCFile was created to track the mirrored file. - assert 1 == len(mirror.uploaded) + assert len(storage_service.uploads) == 1 [cache] = db.session.query(CachedMARCFile).all() - assert db.default_library() == cache.library - assert lane == cache.lane - assert mirror.uploaded[0] == cache.representation - assert None == cache.representation.content - assert ( - "https://test-marc-bucket.s3.amazonaws.com/%s/%s/%s.mrc" - % ( - db.default_library().short_name, - quote(str(cache.representation.fetched_at)), - quote(lane.display_name), - ) - == mirror.uploaded[0].mirror_url - ) - assert None == cache.start_time - assert cache.end_time > now - - # The content was uploaded in two parts. 
- assert 2 == len(mirror.content[0]) - complete_file = b"".join(mirror.content[0]) - records = list(MARCReader(complete_file)) - assert 2 == len(records) + assert cache.library == db.default_library() + if object_type == "lane": + assert cache.lane == lane_or_wl + else: + assert cache.lane is None + assert cache.representation.content is None + assert storage_service.uploads[0].key == "{}/{}/{}.mrc".format( + db.default_library().short_name, + str(cache.representation.fetched_at), + lane_or_wl.display_name, + ) + assert quote(storage_service.uploads[0].key) in cache.representation.mirror_url + assert cache.start_time is None + assert marc_exporter_fixture.now < cache.end_time + + records = list(MARCReader(storage_service.uploads[0].content)) + assert len(records) == 2 title_fields = [record.get_fields("245") for record in records] titles = [fields[0].get_subfields("a")[0] for fields in title_fields] - assert {w1.title, w2.title} == set(titles) - - assert w1.title in w1.marc_record - assert w2.title in w2.marc_record - - db.session.delete(cache) - - search_engine.mock_query_works([w1, w2]) - # It also works with a WorkList instead of a Lane, in which case - # there will be no lane in the CachedMARCFile. - worklist = WorkList() - worklist.initialize(db.default_library(), display_name="All Books") - - mirror = MockS3Uploader() - exporter.records( - worklist, - annotator, - mirror_integration, - mirror=mirror, - query_batch_size=1, - upload_batch_size=1, - search_engine=search_engine, - ) - - assert 1 == len(mirror.uploaded) - [cache] = db.session.query(CachedMARCFile).all() - assert db.default_library() == cache.library - assert None == cache.lane - assert mirror.uploaded[0] == cache.representation - assert None == cache.representation.content - assert ( - "https://test-marc-bucket.s3.amazonaws.com/%s/%s/%s.mrc" - % ( - db.default_library().short_name, - quote(str(cache.representation.fetched_at)), - quote(worklist.display_name), - ) - == mirror.uploaded[0].mirror_url - ) - assert None == cache.start_time - assert cache.end_time > now - - assert 2 == len(mirror.content[0]) - complete_file = b"".join(mirror.content[0]) - records = list(MARCReader(complete_file)) - assert 2 == len(records) + assert set(titles) == { + marc_exporter_fixture.w1.title, + marc_exporter_fixture.w2.title, + } - db.session.delete(cache) + assert marc_exporter_fixture.w1.title in marc_exporter_fixture.w1.marc_record + assert marc_exporter_fixture.w2.title in marc_exporter_fixture.w2.marc_record + def test_records_start_time( + self, + db: DatabaseTransactionFixture, + s3_service_fixture: S3ServiceFixture, + marc_exporter_fixture: MarcExporterFixture, + ): # If a start time is set, it's used in the mirror url. # # (Our mock search engine returns everthing in its 'index', # so this doesn't test that the start time is actually used to # find works -- that's in the search index tests and the # tests of MARCExporterFacets.) 
- start_time = now - datetime.timedelta(days=3) + start_time = marc_exporter_fixture.now - datetime.timedelta(days=3) + exporter = marc_exporter_fixture.exporter + annotator = marc_exporter_fixture.annotator + search_engine = marc_exporter_fixture.search_engine + lane = db.lane("Test Lane", genres=["Mystery"]) + storage_service = s3_service_fixture.mock_service() - mirror = MockS3Uploader() exporter.records( lane, annotator, - mirror_integration, + storage_service, start_time=start_time, - mirror=mirror, query_batch_size=2, upload_batch_size=2, search_engine=search_engine, ) [cache] = db.session.query(CachedMARCFile).all() - assert db.default_library() == cache.library - assert lane == cache.lane - assert mirror.uploaded[0] == cache.representation - assert None == cache.representation.content - assert ( - "https://test-marc-bucket.s3.amazonaws.com/%s/%s-%s/%s.mrc" - % ( - db.default_library().short_name, - quote(str(start_time)), - quote(str(cache.representation.fetched_at)), - quote(lane.display_name), - ) - == mirror.uploaded[0].mirror_url + assert cache.library == db.default_library() + assert cache.lane == lane + assert cache.representation.content is None + assert storage_service.uploads[0].key == "{}/{}-{}/{}.mrc".format( + db.default_library().short_name, + str(start_time), + str(cache.representation.fetched_at), + lane.display_name, ) - assert start_time == cache.start_time - assert cache.end_time > now - db.session.delete(cache) + assert cache.start_time == start_time + assert marc_exporter_fixture.now < cache.end_time + def test_records_empty_search( + self, + db: DatabaseTransactionFixture, + s3_service_fixture: S3ServiceFixture, + marc_exporter_fixture: MarcExporterFixture, + ): # If the search engine returns no contents for the lane, # nothing will be mirrored, but a CachedMARCFile is still # created to track that we checked for updates. - search_engine.mock_query_works([]) + exporter = marc_exporter_fixture.exporter + annotator = marc_exporter_fixture.annotator + empty_search_engine = ExternalSearchIndexFake(db.session) + lane = db.lane("Test Lane", genres=["Mystery"]) + storage_service = s3_service_fixture.mock_service() - mirror = MockS3Uploader() exporter.records( lane, annotator, - mirror_integration, - mirror=mirror, - search_engine=search_engine, + storage_service, + search_engine=empty_search_engine, ) - assert [] == mirror.content[0] + assert [] == storage_service.uploads [cache] = db.session.query(CachedMARCFile).all() - assert cache.representation == mirror.uploaded[0] - assert db.default_library() == cache.library - assert lane == cache.lane - assert None == cache.representation.content - assert None == cache.start_time - assert cache.end_time > now - - db.session.delete(cache) - - def test_get_storage_settings(self, db: DatabaseTransactionFixture): - # Two ExternalIntegration, one has a marc_bucket setting, and the - # other doesn't. - has_marc_bucket = db.external_integration( - name="has_marc_bucket", - protocol=db.fresh_str(), - goal=ExternalIntegration.STORAGE_GOAL, - settings={"marc_bucket": "test-marc-bucket"}, - ) - db.external_integration( - name="no_marc_bucket", - protocol=db.fresh_str(), - goal=ExternalIntegration.STORAGE_GOAL, - ) - - # Before we call get_storage_settings, the only option is the default. 
- assert MARCExporter.SETTING["options"] == [ - MARCExporter.DEFAULT_MIRROR_INTEGRATION - ] - - MARCExporter.get_storage_settings(db.session) - - # After we call get_storage_settings, the options are the default and - # the ExternalIntegration with a marc_bucket setting. - assert len(MARCExporter.SETTING["options"]) == 2 - [default, from_config] = MARCExporter.SETTING["options"] - assert default == MARCExporter.DEFAULT_MIRROR_INTEGRATION - assert from_config == { - "key": str(has_marc_bucket.id), - "label": has_marc_bucket.name, - } + assert cache.library == db.default_library() + assert cache.lane == lane + assert cache.representation.content is None + assert cache.start_time is None + assert marc_exporter_fixture.now < cache.end_time class TestMARCExporterFacets: diff --git a/tests/core/test_marc2.py b/tests/core/test_marc2.py deleted file mode 100644 index 07b96a8502..0000000000 --- a/tests/core/test_marc2.py +++ /dev/null @@ -1,15 +0,0 @@ -from core.mirror import MirrorUploader - - -class TestMiscellaneous: - def test_mirror_uploader_implementations_are_being_loaded(self): - """ - This test verifies that the two S3 mirror implementations are being - loaded when the MARCExporter is imported. It was not added to - tests/core/test_marc.py because that test causes the implementations - to be loaded since it references the core.s3 package directly. - """ - from core.marc import MARCExporter # noqa: autoflake - - assert MirrorUploader.IMPLEMENTATION_REGISTRY.get("Amazon S3") - assert MirrorUploader.IMPLEMENTATION_REGISTRY.get("MinIO") diff --git a/tests/core/test_mirror_uploader.py b/tests/core/test_mirror_uploader.py deleted file mode 100644 index 2487397079..0000000000 --- a/tests/core/test_mirror_uploader.py +++ /dev/null @@ -1,240 +0,0 @@ -import pytest - -from core.config import CannotLoadConfiguration -from core.mirror import MirrorUploader -from core.model import ExternalIntegration -from core.model.configuration import ExternalIntegrationLink -from core.s3 import ( - MinIOUploader, - MinIOUploaderConfiguration, - S3Uploader, - S3UploaderConfiguration, -) -from core.util.datetime_helpers import utc_now -from tests.fixtures.database import DatabaseTransactionFixture - - -class DummySuccessUploader(MirrorUploader): - def __init__(self, integration=None): - pass - - def book_url( - self, - identifier, - extension=".epub", - open_access=True, - data_source=None, - title=None, - ): - pass - - def cover_image_url(self, data_source, identifier, filename=None, scaled_size=None): - pass - - def sign_url(self, url, expiration=None): - pass - - def split_url(self, url, unquote=True): - pass - - def do_upload(self, representation): - return None - - -class DummyFailureUploader(MirrorUploader): - def __init__(self, integration=None): - pass - - def book_url( - self, - identifier, - extension=".epub", - open_access=True, - data_source=None, - title=None, - ): - pass - - def cover_image_url(self, data_source, identifier, filename=None, scaled_size=None): - pass - - def sign_url(self, url, expiration=None): - pass - - def split_url(self, url, unquote=True): - pass - - def do_upload(self, representation): - return "I always fail." - - -class TestInitialization: - """Test the ability to get a MirrorUploader for various aspects of site - configuration. 
- """ - - @staticmethod - def _integration(data: DatabaseTransactionFixture) -> ExternalIntegration: - """Helper method to make a storage ExternalIntegration.""" - storage_name = "some storage" - integration = data.external_integration("my protocol") - integration.goal = ExternalIntegration.STORAGE_GOAL - integration.name = storage_name - return integration - - @pytest.mark.parametrize( - "name,protocol,uploader_class,settings", - [ - ("s3_uploader", ExternalIntegration.S3, S3Uploader, None), - ( - "minio_uploader", - ExternalIntegration.MINIO, - MinIOUploader, - {MinIOUploaderConfiguration.ENDPOINT_URL: "http://localhost"}, - ), - ], - ) - def test_mirror( - self, - db, - name, - protocol, - uploader_class, - settings, - ): - storage_name = "some storage" - # If there's no integration with goal=STORAGE or name=storage_name, - # MirrorUploader.mirror raises an exception. - with pytest.raises(CannotLoadConfiguration) as excinfo: - MirrorUploader.mirror(db.session, storage_name) - assert "No storage integration with name 'some storage' is configured" in str( - excinfo.value - ) - - # If there's only one, mirror() uses it to initialize a - # MirrorUploader. - integration = self._integration(db) - integration.protocol = protocol - - if settings: - for key, value in settings.items(): - integration.setting(key).value = value - - uploader = MirrorUploader.mirror(db.session, integration=integration) - - assert isinstance(uploader, uploader_class) - - def test_integration_by_name(self, db: DatabaseTransactionFixture): - integration = self._integration(db) - - # No name was passed so nothing is found - with pytest.raises(CannotLoadConfiguration) as excinfo: - MirrorUploader.integration_by_name(db.session) - assert "No storage integration with name 'None' is configured" in str( - excinfo.value - ) - - # Correct name was passed - integration = MirrorUploader.integration_by_name(db.session, integration.name) - assert isinstance(integration, ExternalIntegration) - - def test_for_collection(self, db: DatabaseTransactionFixture): - - # This collection has no mirror_integration, so - # there is no MirrorUploader for it. - collection = db.collection() - assert None == MirrorUploader.for_collection( - collection, ExternalIntegrationLink.COVERS - ) - - # This collection has a properly configured mirror_integration, - # so it can have an MirrorUploader. - integration = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - username="username", - password="password", - settings={S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "some-covers"}, - ) - integration_link = db.external_integration_link( - integration=collection._external_integration, - other_integration=integration, - purpose=ExternalIntegrationLink.COVERS, - ) - - uploader = MirrorUploader.for_collection( - collection, ExternalIntegrationLink.COVERS - ) - assert isinstance(uploader, MirrorUploader) - - @pytest.mark.parametrize( - "name,protocol,uploader_class,settings", - [ - ("s3_uploader", ExternalIntegration.S3, S3Uploader, None), - ( - "minio_uploader", - ExternalIntegration.MINIO, - MinIOUploader, - {MinIOUploaderConfiguration.ENDPOINT_URL: "http://localhost"}, - ), - ], - ) - def test_constructor( - self, - db, - name, - protocol, - uploader_class, - settings, - ): - # You can't create a MirrorUploader with an integration - # that's not designed for storage. 
- integration = self._integration(db) - integration.goal = ExternalIntegration.LICENSE_GOAL - integration.protocol = protocol - - if settings: - for key, value in settings.items(): - integration.setting(key).value = value - with pytest.raises(CannotLoadConfiguration) as excinfo: - uploader_class(integration) - assert "from an integration with goal=licenses" in str(excinfo.value) - - def test_implementation_registry(self, db: DatabaseTransactionFixture): - session = db.session - - # The implementation class used for a given ExternalIntegration - # is controlled by the integration's protocol and the contents - # of the MirrorUploader's implementation registry. - MirrorUploader.IMPLEMENTATION_REGISTRY["my protocol"] = DummyFailureUploader - - integration = self._integration(db) - uploader = MirrorUploader.mirror(session, integration=integration) - assert isinstance(uploader, DummyFailureUploader) - del MirrorUploader.IMPLEMENTATION_REGISTRY["my protocol"] - - -class TestMirrorUploader: - """Test the basic workflow of MirrorUploader.""" - - def test_mirror_batch(self, db: DatabaseTransactionFixture): - r1, ignore = db.representation() - r2, ignore = db.representation() - uploader = DummySuccessUploader() - uploader.mirror_batch([r1, r2]) - assert r1.mirrored_at != None - assert r2.mirrored_at != None - - def test_success_and_then_failure(self, db: DatabaseTransactionFixture): - r, ignore = db.representation() - now = utc_now() - DummySuccessUploader().mirror_one(r, "") - assert r.mirrored_at > now - assert None == r.mirror_exception - - # Even if the original upload succeeds, a subsequent upload - # may fail in a way that leaves the image in an inconsistent - # state. - DummyFailureUploader().mirror_one(r, "") - assert None == r.mirrored_at - assert "I always fail." == r.mirror_exception diff --git a/tests/core/test_s3.py b/tests/core/test_s3.py deleted file mode 100644 index a2f216279a..0000000000 --- a/tests/core/test_s3.py +++ /dev/null @@ -1,1356 +0,0 @@ -import functools -from unittest.mock import MagicMock - -import botocore -import pytest -from botocore.exceptions import BotoCoreError, ClientError - -from core.mirror import MirrorUploader -from core.model import ( - DataSource, - ExternalIntegration, - Hyperlink, - Identifier, - Representation, - create, -) -from core.s3 import ( - MinIOUploader, - MinIOUploaderConfiguration, - MockS3Client, - MultipartS3Upload, - S3AddressingStyle, - S3Uploader, - S3UploaderConfiguration, -) -from core.util.datetime_helpers import datetime_utc, utc_now - -# TODO: we can drop this when we drop support for Python 3.6 and 3.7 -from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.s3 import S3UploaderFixture, S3UploaderIntegrationFixture -from tests.fixtures.sample_covers import SampleCoversFixture - - -class TestS3Uploader: - def test_names(self): - # The NAME associated with this class must be the same as its - # key in the MirrorUploader implementation registry, and it's - # better if it's the same as the name of the external - # integration. 
- assert S3Uploader.NAME == ExternalIntegration.S3 - assert ( - S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3] - ) - - def test_instantiation(self, s3_uploader_fixture: S3UploaderFixture): - transaction = s3_uploader_fixture.transaction - - integration = transaction.external_integration( - ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL - ) - integration.username = "your-access-key" - integration.password = "your-secret-key" - integration.setting( - S3UploaderConfiguration.URL_TEMPLATE_KEY - ).value = "a transform" - uploader = MirrorUploader.implementation(integration) - assert True == isinstance(uploader, S3Uploader) - - # The URL_TEMPLATE_KEY setting becomes the .url_transform - # attribute on the S3Uploader object. - assert "a transform" == uploader.url_transform - - @pytest.mark.parametrize( - "name,username,password", - [ - ("empty_credentials", None, None), - ("empty_string_credentials", "", ""), - ("non_empty_string_credentials", "username", "password"), - ], - ) - def test_initialization( - self, s3_uploader_fixture: S3UploaderFixture, name, username, password - ): - # Arrange - transaction = s3_uploader_fixture.transaction - settings = {"username": username, "password": password} - integration = transaction.external_integration( - ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - settings=settings, - ) - client_class = MagicMock() - - # Act - S3Uploader(integration, client_class=client_class) - - # Assert - assert client_class.call_count == 2 - - service_name = client_class.call_args_list[0].args[0] - region_name = client_class.call_args_list[0].kwargs["region_name"] - aws_access_key_id = client_class.call_args_list[0].kwargs["aws_access_key_id"] - aws_secret_access_key = client_class.call_args_list[0].kwargs[ - "aws_secret_access_key" - ] - config = client_class.call_args_list[0].kwargs["config"] - assert service_name == "s3" - assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION - assert aws_access_key_id == None - assert aws_secret_access_key == None - assert config.signature_version == botocore.UNSIGNED - assert ( - config.s3["addressing_style"] - == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE - ) - - service_name = client_class.call_args_list[1].args[0] - region_name = client_class.call_args_list[1].kwargs["region_name"] - aws_access_key_id = client_class.call_args_list[1].kwargs["aws_access_key_id"] - aws_secret_access_key = client_class.call_args_list[1].kwargs[ - "aws_secret_access_key" - ] - assert service_name == "s3" - assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION - assert aws_access_key_id == (username if username != "" else None) - assert aws_secret_access_key == (password if password != "" else None) - assert "config" not in client_class.call_args_list[1].kwargs - - def test_custom_client_class(self, s3_uploader_fixture: S3UploaderFixture): - """You can specify a client class to use instead of boto3.client.""" - integration = s3_uploader_fixture.integration() - uploader = S3Uploader(integration, MockS3Client) - assert isinstance(uploader.client, MockS3Client) - - def test_get_bucket(self, s3_uploader_fixture: S3UploaderFixture): - buckets = { - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "banana", - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "bucket", - } - buckets_plus_irrelevant_setting = dict(buckets) - buckets_plus_irrelevant_setting["not-a-bucket-at-all"] = "value" - uploader = s3_uploader_fixture.create_s3_uploader( - **buckets_plus_irrelevant_setting - ) 
- - # This S3Uploader knows about the configured buckets. It - # wasn't informed of the irrelevant 'not-a-bucket-at-all' - # setting. - assert buckets == uploader.buckets - - # get_bucket just does a lookup in .buckets - uploader.buckets["foo"] = object() - result = uploader.get_bucket("foo") - assert uploader.buckets["foo"] == result - - @pytest.mark.parametrize( - "name,bucket,path,expected_result,region,addressing_style", - [ - ( - "s3_url_with_path_without_slash", - "a-bucket", - "a-path", - "https://a-bucket.s3.amazonaws.com/a-path", - None, - None, - ), - ( - "s3_dummy_url_with_path_without_slash", - "dummy", - "dummy", - "https://dummy.s3.amazonaws.com/dummy", - None, - None, - ), - ( - "s3_path_style_url_with_path_without_slash", - "a-bucket", - "a-path", - "https://s3.amazonaws.com/a-bucket/a-path", - None, - S3AddressingStyle.PATH.value, - ), - ( - "s3_path_style_dummy_url_with_path_without_slash", - "dummy", - "dummy", - "https://s3.amazonaws.com/dummy/dummy", - None, - S3AddressingStyle.PATH.value, - ), - ( - "s3_url_with_path_with_slash", - "a-bucket", - "/a-path", - "https://a-bucket.s3.amazonaws.com/a-path", - None, - None, - ), - ( - "s3_path_style_url_with_path_with_slash", - "a-bucket", - "/a-path", - "https://s3.amazonaws.com/a-bucket/a-path", - None, - S3AddressingStyle.PATH.value, - ), - ( - "s3_url_with_custom_region_and_path_without_slash", - "a-bucket", - "a-path", - "https://a-bucket.s3.us-east-2.amazonaws.com/a-path", - "us-east-2", - None, - ), - ( - "s3_path_style_url_with_custom_region_and_path_without_slash", - "a-bucket", - "a-path", - "https://s3.us-east-2.amazonaws.com/a-bucket/a-path", - "us-east-2", - S3AddressingStyle.PATH.value, - ), - ( - "s3_url_with_custom_region_and_path_with_slash", - "a-bucket", - "/a-path", - "https://a-bucket.s3.us-east-3.amazonaws.com/a-path", - "us-east-3", - None, - ), - ( - "s3_path_style_url_with_custom_region_and_path_with_slash", - "a-bucket", - "/a-path", - "https://s3.us-east-3.amazonaws.com/a-bucket/a-path", - "us-east-3", - S3AddressingStyle.PATH.value, - ), - ( - "custom_http_url_and_path_without_slash", - "http://a-bucket.com/", - "a-path", - "http://a-bucket.com/a-path", - None, - None, - ), - ( - "custom_http_url_and_path_with_slash", - "http://a-bucket.com/", - "/a-path", - "http://a-bucket.com/a-path", - None, - None, - ), - ( - "custom_http_url_and_path_without_slash", - "https://a-bucket.com/", - "a-path", - "https://a-bucket.com/a-path", - None, - None, - ), - ( - "custom_http_url_and_path_with_slash", - "https://a-bucket.com/", - "/a-path", - "https://a-bucket.com/a-path", - None, - None, - ), - ], - ) - def test_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - path, - expected_result, - region, - addressing_style, - ): - # Arrange - uploader = s3_uploader_fixture.create_s3_uploader( - region=region, addressing_style=addressing_style - ) - - # Act - result = uploader.url(bucket, path) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,bucket,key,expected_result,url_transform,region", - [ - ( - "implicit_s3_url_template", - "bucket", - "the key", - "https://bucket.s3.amazonaws.com/the%20key", - None, - None, - ), - ( - "implicit_s3_url_template_with_custom_region", - "bucket", - "the key", - "https://bucket.s3.us-east-2.amazonaws.com/the%20key", - None, - "us-east-2", - ), - ( - "explicit_s3_url_template", - "bucket", - "the key", - "https://bucket.s3.amazonaws.com/the%20key", - S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, - None, - ), - ( - 
"explicit_s3_url_template_with_custom_region", - "bucket", - "the key", - "https://bucket.s3.us-east-2.amazonaws.com/the%20key", - S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, - "us-east-2", - ), - ( - "http_url_template", - "bucket", - "the këy", - "http://bucket/the%20k%C3%ABy", - S3UploaderConfiguration.URL_TEMPLATE_HTTP, - None, - ), - ( - "https_url_template", - "bucket", - "the këy", - "https://bucket/the%20k%C3%ABy", - S3UploaderConfiguration.URL_TEMPLATE_HTTPS, - None, - ), - ], - ) - def test_final_mirror_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - key, - expected_result, - url_transform, - region, - ): - # Arrange - uploader = s3_uploader_fixture.create_s3_uploader(region=region) - - if url_transform: - uploader.url_transform = url_transform - - # Act - result = uploader.final_mirror_url(bucket, key) - - # Assert - if not url_transform: - assert ( - S3UploaderConfiguration.URL_TEMPLATE_DEFAULT == uploader.url_transform - ) - - assert result == expected_result - - def test_key_join(self): - """Test the code used to build S3 keys from parts.""" - parts = ["Gutenberg", b"Gutenberg ID", 1234, "Die Flügelmaus+.epub"] - assert ( - "Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub" - == S3Uploader.key_join(parts) - ) - - @pytest.mark.parametrize( - "name,bucket,data_source_name,expected_result,scaled_size,region,", - [ - ( - "with_gutenberg_cover_generator_data_source", - "test-book-covers-s3-bucket", - DataSource.GUTENBERG_COVER_GENERATOR, - "https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/", - None, - None, - ), - ( - "with_overdrive_data_source", - "test-book-covers-s3-bucket", - DataSource.OVERDRIVE, - "https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/", - None, - None, - ), - ( - "with_overdrive_data_source_and_scaled_size", - "test-book-covers-s3-bucket", - DataSource.OVERDRIVE, - "https://test-book-covers-s3-bucket.s3.amazonaws.com/scaled/300/Overdrive/", - 300, - None, - ), - ( - "with_gutenberg_cover_generator_data_source_and_custom_region", - "test-book-covers-s3-bucket", - DataSource.GUTENBERG_COVER_GENERATOR, - "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/", - None, - "us-east-3", - ), - ( - "with_overdrive_data_source_and_custom_region", - "test-book-covers-s3-bucket", - DataSource.OVERDRIVE, - "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/", - None, - "us-east-3", - ), - ( - "with_overdrive_data_source_and_scaled_size_and_custom_region", - "test-book-covers-s3-bucket", - DataSource.OVERDRIVE, - "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/", - 300, - "us-east-3", - ), - ], - ) - def test_cover_image_root( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - data_source_name, - expected_result, - scaled_size, - region, - ): - # Arrange - session = s3_uploader_fixture.transaction.session - uploader = s3_uploader_fixture.create_s3_uploader(region=region) - data_source = DataSource.lookup(session, data_source_name) - - # Act - result = uploader.cover_image_root(bucket, data_source, scaled_size=scaled_size) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,bucket,expected_result,region", - [ - ( - "with_default_region", - "test-open-access-s3-bucket", - "https://test-open-access-s3-bucket.s3.amazonaws.com/", - None, - ), - ( - "with_custom_region", - "test-open-access-s3-bucket", - 
"https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/", - "us-east-3", - ), - ], - ) - def test_content_root( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - expected_result, - region, - ): - # Arrange - uploader = s3_uploader_fixture.create_s3_uploader(region=region) - - # Act - result = uploader.content_root(bucket) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,bucket,library_name,expected_result,region", - [ - ( - "s3_url", - "test-marc-s3-bucket", - "SHORT", - "https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/", - None, - ), - ( - "s3_url_with_custom_region", - "test-marc-s3-bucket", - "SHORT", - "https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/", - "us-east-2", - ), - ( - "custom_http_url", - "http://my-feed/", - "SHORT", - "http://my-feed/SHORT/", - None, - ), - ( - "custom_https_url", - "https://my-feed/", - "SHORT", - "https://my-feed/SHORT/", - None, - ), - ], - ) - def test_marc_file_root( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - library_name, - expected_result, - region, - ): - # Arrange - uploader = s3_uploader_fixture.create_s3_uploader(region=region) - library = s3_uploader_fixture.transaction.library(short_name=library_name) - - # Act - result = uploader.marc_file_root(bucket, library) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,buckets,identifier,expected_result,extension,data_source_name,title,region,open_access", - [ - ( - "with_identifier", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub", - None, - None, - None, - None, - True, - ), - ( - "with_custom_extension", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf", - "pdf", - None, - None, - None, - True, - ), - ( - "with_custom_dotted_extension", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf", - ".pdf", - None, - None, - None, - True, - ), - ( - "with_custom_data_source", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub", - None, - DataSource.UNGLUE_IT, - None, - None, - True, - ), - ( - "with_custom_title", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub", - None, - None, - "On Books", - None, - True, - ), - ( - "with_custom_extension_and_title_and_data_source", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", - ".pdf", - DataSource.UNGLUE_IT, - "On Books", - None, - True, - ), - ( - "with_custom_extension_and_title_and_data_source_and_region", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", - ".pdf", - DataSource.UNGLUE_IT, - "On Books", - "us-east-3", - True, - ), - ( - "with_protected_access_and_custom_extension_and_title_and_data_source_and_region", - {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", - ".pdf", - DataSource.UNGLUE_IT, - "On Books", - 
"us-east-3", - False, - ), - ], - ) - def test_book_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - buckets, - identifier, - expected_result, - extension, - data_source_name, - title, - region, - open_access, - ): - # Arrange - transaction = s3_uploader_fixture.transaction - identifier = transaction.identifier(foreign_id=identifier) - uploader = s3_uploader_fixture.create_s3_uploader(region=region, **buckets) - - parameters = {"identifier": identifier, "open_access": open_access} - - if extension: - parameters["extension"] = extension - if title: - parameters["title"] = title - - if data_source_name: - data_source = DataSource.lookup(transaction.session, DataSource.UNGLUE_IT) - parameters["data_source"] = data_source - - # Act - result = uploader.book_url(**parameters) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,buckets,data_source_name,identifier,filename,expected_result,scaled_size,region", - [ - ( - "without_scaled_size", - {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, - DataSource.UNGLUE_IT, - "ABOOK", - "filename", - "https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename", - None, - None, - ), - ( - "without_scaled_size_and_with_custom_region", - {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, - DataSource.UNGLUE_IT, - "ABOOK", - "filename", - "https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename", - None, - "us-east-3", - ), - ( - "with_scaled_size", - {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, - DataSource.UNGLUE_IT, - "ABOOK", - "filename", - "https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename", - 601, - None, - ), - ( - "with_scaled_size_and_custom_region", - {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, - DataSource.UNGLUE_IT, - "ABOOK", - "filename", - "https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename", - 601, - "us-east-3", - ), - ], - ) - def test_cover_image_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - buckets, - data_source_name, - identifier, - filename, - expected_result, - scaled_size, - region, - ): - transaction = s3_uploader_fixture.transaction - - # identifier = self._identifier(foreign_id="ABOOK") - # buckets = {S3Uploader.BOOK_COVERS_BUCKET_KEY : 'thecovers'} - # uploader = self._uploader(**buckets) - # m = uploader.cover_image_url - # - # unglueit = DataSource.lookup(self._db, DataSource.UNGLUE_IT) - # identifier = self._identifier(foreign_id="ABOOK") - # eq_('https://s3.amazonaws.com/thecovers/scaled/601/unglue.it/Gutenberg+ID/ABOOK/filename', - # m(unglueit, identifier, "filename", scaled_size=601)) - - # Arrange - data_source = DataSource.lookup(transaction.session, data_source_name) - identifier = transaction.identifier(foreign_id=identifier) - uploader = s3_uploader_fixture.create_s3_uploader(region=region, **buckets) - - # Act - result = uploader.cover_image_url( - data_source, identifier, filename, scaled_size=scaled_size - ) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,bucket,library_name,lane_name,end_time,expected_result,start_time,region", - [ - ( - "with_s3_bucket_and_end_time", - "marc", - "SHORT", - "Lane", - datetime_utc(2020, 1, 1, 0, 0, 0), - "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc", - None, - None, - ), - ( - "with_s3_bucket_and_end_time_and_start_time", - "marc", - "SHORT", - "Lane", - 
datetime_utc(2020, 1, 2, 0, 0, 0), - "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", - datetime_utc(2020, 1, 1, 0, 0, 0), - None, - ), - ( - "with_s3_bucket_and_end_time_and_start_time_and_custom_region", - "marc", - "SHORT", - "Lane", - datetime_utc(2020, 1, 2, 0, 0, 0), - "https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", - datetime_utc(2020, 1, 1, 0, 0, 0), - "us-east-2", - ), - ( - "with_http_bucket_and_end_time_and_start_time", - "http://marc", - "SHORT", - "Lane", - datetime_utc(2020, 1, 2, 0, 0, 0), - "http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", - datetime_utc(2020, 1, 1, 0, 0, 0), - None, - ), - ( - "with_https_bucket_and_end_time_and_start_time", - "https://marc", - "SHORT", - "Lane", - datetime_utc(2020, 1, 2, 0, 0, 0), - "https://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", - datetime_utc(2020, 1, 1, 0, 0, 0), - None, - ), - ], - ) - def test_marc_file_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - library_name, - lane_name, - end_time, - expected_result, - start_time, - region, - ): - # Arrange - transaction = s3_uploader_fixture.transaction - library = transaction.library(short_name=library_name) - lane = transaction.lane(display_name=lane_name) - buckets = {S3UploaderConfiguration.MARC_BUCKET_KEY: bucket} - uploader = s3_uploader_fixture.create_s3_uploader(region=region, **buckets) - - # Act - result = uploader.marc_file_url(library, lane, end_time, start_time) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,url,expected_result,unquote", - [ - ( - "s3_path_style_request_without_region", - "https://s3.amazonaws.com/bucket/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "s3_path_style_request_with_region", - "https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "s3_virtual_hosted_style_request_with_global_endpoint", - "https://bucket.s3.amazonaws.com/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "s3_virtual_hosted_style_request_with_dashed_region", - "https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "s3_virtual_hosted_style_request_with_dotted_region", - "https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "http_url", - "http://book-covers.nypl.org/directory/filename.jpg", - ("book-covers.nypl.org", "directory/filename.jpg"), - True, - ), - ( - "https_url", - "https://book-covers.nypl.org/directory/filename.jpg", - ("book-covers.nypl.org", "directory/filename.jpg"), - True, - ), - ( - "http_url_with_escaped_symbols", - "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", - ("book-covers.nypl.org", "directory/filename with spaces!.jpg"), - True, - ), - ( - "http_url_with_escaped_symbols_but_unquote_set_to_false", - "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", - ("book-covers.nypl.org", "directory/filename+with+spaces%21.jpg"), - False, - ), - ], - ) - def test_split_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - url, - expected_result, - unquote, - ): - # Arrange - s3_uploader = 
s3_uploader_fixture.create_s3_uploader() - - # Act - result = s3_uploader.split_url(url, unquote) - - # Assert - assert result == expected_result - - def test_mirror_one( - self, - s3_uploader_fixture: S3UploaderFixture, - sample_covers_fixture: SampleCoversFixture, - ): - transaction = s3_uploader_fixture.transaction - - edition, pool = transaction.edition(with_license_pool=True) - original_cover_location = "http://example.com/a-cover.png" - content = open( - sample_covers_fixture.sample_cover_path("test-book-cover.png"), "rb" - ).read() - cover, ignore = pool.add_link( - Hyperlink.IMAGE, - original_cover_location, - edition.data_source, - Representation.PNG_MEDIA_TYPE, - content=content, - ) - cover_rep = cover.resource.representation - assert None == cover_rep.mirrored_at - - original_epub_location = "https://books.com/a-book.epub" - epub, ignore = pool.add_link( - Hyperlink.OPEN_ACCESS_DOWNLOAD, - original_epub_location, - edition.data_source, - Representation.EPUB_MEDIA_TYPE, - content="i'm an epub", - ) - epub_rep = epub.resource.representation - assert None == epub_rep.mirrored_at - - s3 = s3_uploader_fixture.create_s3_uploader(client_class=MockS3Client) - - # Mock final_mirror_url so we can verify that it's called with - # the right arguments - def mock_final_mirror_url(bucket, key): - return "final_mirror_url was called with bucket {}, key {}".format( - bucket, key - ) - - s3.final_mirror_url = mock_final_mirror_url - - book_url = "http://books-go/here.epub" - cover_url = "http://s3.amazonaws.com/covers-go/here.png" - s3.mirror_one(cover.resource.representation, cover_url) - s3.mirror_one(epub.resource.representation, book_url) - [ - [data1, bucket1, key1, args1, ignore1], - [data2, bucket2, key2, args2, ignore2], - ] = s3.client.uploads - - # Both representations have had .mirror_url set and been - # mirrored to those URLs. - assert data1.startswith(b"\x89") - assert "covers-go" == bucket1 - assert "here.png" == key1 - assert Representation.PNG_MEDIA_TYPE == args1["ContentType"] - assert (utc_now() - cover_rep.mirrored_at).seconds < 10 - - assert b"i'm an epub" == data2 - assert "books-go" == bucket2 - assert "here.epub" == key2 - assert Representation.EPUB_MEDIA_TYPE == args2["ContentType"] - - # In both cases, mirror_url was set to the result of final_mirror_url. - assert ( - "final_mirror_url was called with bucket books-go, key here.epub" - == epub_rep.mirror_url - ) - assert ( - "final_mirror_url was called with bucket covers-go, key here.png" - == cover_rep.mirror_url - ) - - # mirrored-at was set when the representation was 'mirrored' - for rep in epub_rep, cover_rep: - assert (utc_now() - rep.mirrored_at).seconds < 10 - - def test_mirror_failure(self, s3_uploader_fixture: S3UploaderFixture): - transaction = s3_uploader_fixture.transaction - - edition, pool = transaction.edition(with_license_pool=True) - original_epub_location = "https://books.com/a-book.epub" - epub, ignore = pool.add_link( - Hyperlink.OPEN_ACCESS_DOWNLOAD, - original_epub_location, - edition.data_source, - Representation.EPUB_MEDIA_TYPE, - content="i'm an epub", - ) - epub_rep = epub.resource.representation - - uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client) - - # A network failure is treated as a transient error. - uploader.client.fail_with = BotoCoreError() - uploader.mirror_one(epub_rep, transaction.fresh_url()) - assert None == epub_rep.mirrored_at - assert None == epub_rep.mirror_exception - - # An S3 credential failure is treated as a transient error. 
- response = dict( - Error=dict( - Code=401, - Message="Bad credentials", - ) - ) - uploader.client.fail_with = ClientError(response, "SomeOperation") # type: ignore[arg-type] - uploader.mirror_one(epub_rep, transaction.fresh_url()) - assert None == epub_rep.mirrored_at - assert None == epub_rep.mirror_exception - - # Because the file was not successfully uploaded, - # final_mirror_url was never called and mirror_url is - # was not set. - assert None == epub_rep.mirror_url - - # A bug in the code is not treated as a transient error -- - # the exception propagates through. - uploader.client.fail_with = Exception("crash!") - pytest.raises(Exception, uploader.mirror_one, epub_rep, transaction.fresh_url()) - - def test_svg_mirroring(self, s3_uploader_fixture: S3UploaderFixture): - transaction = s3_uploader_fixture.transaction - - edition, pool = transaction.edition(with_license_pool=True) - original = transaction.fresh_url() - - # Create an SVG cover for the book. - svg = """ - - - -""" - hyperlink, ignore = pool.add_link( - Hyperlink.IMAGE, - original, - edition.data_source, - Representation.SVG_MEDIA_TYPE, - content=svg, - ) - - # 'Upload' it to S3. - s3 = s3_uploader_fixture.create_s3_uploader(MockS3Client) - s3.mirror_one(hyperlink.resource.representation, transaction.fresh_url()) - [[data, bucket, key, args, ignore]] = s3.client.uploads - - assert Representation.SVG_MEDIA_TYPE == args["ContentType"] - assert b"svg" in data - assert b"PNG" not in data - - def test_multipart_upload(self, s3_uploader_fixture: S3UploaderFixture): - transaction = s3_uploader_fixture.transaction - - class MockMultipartS3Upload(MultipartS3Upload): - completed = None - aborted = None - - def __init__(self, uploader, representation, mirror_to): - self.parts = [] - MockMultipartS3Upload.completed = False - MockMultipartS3Upload.aborted = False - - def upload_part(self, content): - self.parts.append(content) - - def complete(self): - MockMultipartS3Upload.completed = True - - def abort(self): - MockMultipartS3Upload.aborted = True - - rep, ignore = create( - transaction.session, - Representation, - url="http://books.mrc", - media_type=Representation.MARC_MEDIA_TYPE, - ) - - s3 = s3_uploader_fixture.create_s3_uploader(MockS3Client) - - # Successful upload - with s3.multipart_upload( - rep, rep.url, upload_class=MockMultipartS3Upload - ) as upload: - assert [] == upload.parts - assert False == upload.completed - assert False == upload.aborted - - upload.upload_part("Part 1") - upload.upload_part("Part 2") - - assert ["Part 1", "Part 2"] == upload.parts - - assert True == MockMultipartS3Upload.completed - assert False == MockMultipartS3Upload.aborted - assert None == rep.mirror_exception - - class FailingMultipartS3Upload(MockMultipartS3Upload): - def upload_part(self, content): - raise Exception("Error!") - - # Failed during upload - with s3.multipart_upload( - rep, rep.url, upload_class=FailingMultipartS3Upload - ) as upload: - upload.upload_part("Part 1") - - assert False == MockMultipartS3Upload.completed - assert True == MockMultipartS3Upload.aborted - assert "Error!" == rep.mirror_exception - - class AnotherFailingMultipartS3Upload(MockMultipartS3Upload): - def complete(self): - raise Exception("Error!") - - rep.mirror_exception = None - # Failed during completion - with s3.multipart_upload( - rep, rep.url, upload_class=AnotherFailingMultipartS3Upload - ) as upload: - upload.upload_part("Part 1") - - assert False == MockMultipartS3Upload.completed - assert True == MockMultipartS3Upload.aborted - assert "Error!" 
-
-    @pytest.mark.parametrize(
-        "name,expiration_settings,expected_expiration",
-        [
-            (
-                "default_expiration_parameter",
-                None,
-                int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION),
-            ),
-            (
-                "empty_expiration_parameter",
-                {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100},
-                100,
-            ),
-        ],
-    )
-    def test_sign_url(
-        self,
-        s3_uploader_fixture: S3UploaderFixture,
-        name,
-        expiration_settings,
-        expected_expiration,
-    ):
-        # Arrange
-        region = "us-east-1"
-        bucket = "bucket"
-        filename = "filename"
-        url = f"https://{bucket}.s3.{region}.amazonaws.com/{filename}"
-        expected_url = url + "?AWSAccessKeyId=KEY&Expires=1&Signature=S"
-        settings = expiration_settings if expiration_settings else {}
-        s3_uploader = s3_uploader_fixture.create_s3_uploader(region=region, **settings)
-        s3_uploader.split_url = MagicMock(return_value=(bucket, filename))
-        s3_uploader.client.generate_presigned_url = MagicMock(return_value=expected_url)
-
-        # Act
-        result = s3_uploader.sign_url(url)
-
-        # Assert
-        assert result == expected_url
-        s3_uploader.split_url.assert_called_once_with(url)
-        s3_uploader.client.generate_presigned_url.assert_called_once_with(
-            "get_object",
-            ExpiresIn=expected_expiration,
-            Params={"Bucket": bucket, "Key": filename},
-        )
-
-
-class TestMultiPartS3Upload:
-    @staticmethod
-    def _representation(transaction: DatabaseTransactionFixture):
-        rep, ignore = create(
-            transaction.session,
-            Representation,
-            url="http://bucket/books.mrc",
-            media_type=Representation.MARC_MEDIA_TYPE,
-        )
-        return rep
-
-    def test_init(self, s3_uploader_fixture: S3UploaderFixture):
-        uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client)
-        rep = self._representation(s3_uploader_fixture.transaction)
-        upload = MultipartS3Upload(uploader, rep, rep.url)
-        assert uploader == upload.uploader
-        assert rep == upload.representation
-        assert "bucket" == upload.bucket
-        assert "books.mrc" == upload.filename
-        assert 1 == upload.part_number
-        assert [] == upload.parts
-        assert 1 == upload.upload.get("UploadId")
-
-        uploader.client.fail_with = Exception("Error!")
-        pytest.raises(Exception, MultipartS3Upload, uploader, rep, rep.url)
-
-    def test_upload_part(self, s3_uploader_fixture: S3UploaderFixture):
-        uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client)
-        rep = self._representation(s3_uploader_fixture.transaction)
-        upload = MultipartS3Upload(uploader, rep, rep.url)
-        upload.upload_part("Part 1")
-        upload.upload_part("Part 2")
-        assert [
-            {
-                "Body": "Part 1",
-                "UploadId": 1,
-                "PartNumber": 1,
-                "Bucket": "bucket",
-                "Key": "books.mrc",
-            },
-            {
-                "Body": "Part 2",
-                "UploadId": 1,
-                "PartNumber": 2,
-                "Bucket": "bucket",
-                "Key": "books.mrc",
-            },
-        ] == uploader.client.parts
-        assert 3 == upload.part_number
-        assert [
-            {"ETag": "etag", "PartNumber": 1},
-            {"ETag": "etag", "PartNumber": 2},
-        ] == upload.parts
-
-        uploader.client.fail_with = Exception("Error!")
-        pytest.raises(Exception, upload.upload_part, "Part 3")
-
-    def test_complete(self, s3_uploader_fixture: S3UploaderFixture):
-        uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client)
-        rep = self._representation(s3_uploader_fixture.transaction)
-        upload = MultipartS3Upload(uploader, rep, rep.url)
-        upload.upload_part("Part 1")
-        upload.upload_part("Part 2")
-        upload.complete()
-        assert [
-            {
-                "Bucket": "bucket",
-                "Key": "books.mrc",
-                "UploadId": 1,
-                "MultipartUpload": {
-                    "Parts": [
-                        {"ETag": "etag", "PartNumber": 1},
-                        {"ETag": "etag", "PartNumber": 2},
-                    ],
-                },
-            }
-        ] == uploader.client.uploads
-
-    def test_abort(self, s3_uploader_fixture: S3UploaderFixture):
-        uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client)
-        rep = self._representation(s3_uploader_fixture.transaction)
-        upload = MultipartS3Upload(uploader, rep, rep.url)
-        upload.upload_part("Part 1")
-        upload.upload_part("Part 2")
-        upload.abort()
-        assert [] == uploader.client.parts
-
-
-@pytest.mark.minio
-class TestS3UploaderIntegration:
-    @pytest.mark.parametrize(
-        "name,uploader_class,bucket_type,bucket_name,open_access,settings",
-        [
-            (
-                "using_s3_uploader_and_open_access_bucket",
-                functools.partial(
-                    S3Uploader,
-                    host=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_HOST,
-                ),
-                S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY,
-                "test-bucket",
-                True,
-                None,
-            ),
-            (
-                "using_s3_uploader_and_protected_access_bucket",
-                functools.partial(
-                    S3Uploader,
-                    host=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_HOST,
-                ),
-                S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY,
-                "test-bucket",
-                False,
-                None,
-            ),
-            (
-                "using_minio_uploader_and_open_access_bucket",
-                MinIOUploader,
-                S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY,
-                "test-bucket",
-                True,
-                {
-                    MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL
-                },
-            ),
-            (
-                "using_minio_uploader_and_protected_access_bucket",
-                MinIOUploader,
-                S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY,
-                "test-bucket",
-                False,
-                {
-                    MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL
-                },
-            ),
-        ],
-    )
-    def test_mirror(
-        self,
-        s3_uploader_integration_fixture: S3UploaderIntegrationFixture,
-        name,
-        uploader_class,
-        bucket_type,
-        bucket_name,
-        open_access,
-        settings,
-    ):
-        fixture = s3_uploader_integration_fixture
-
-        # Arrange
-        book_title = "1234567890"
-        book_content = b"1234567890"
-        identifier = Identifier(type=Identifier.ISBN, identifier=book_title)
-        representation = Representation(
-            content=book_content, media_type=Representation.EPUB_MEDIA_TYPE
-        )
-        buckets = {
-            bucket_type: bucket_name,
-        }
-
-        if settings:
-            settings.update(buckets)
-        else:
-            settings = buckets
-
-        s3_uploader = fixture.create_s3_uploader(
-            uploader_class=uploader_class, **settings
-        )
-
-        fixture.minio_s3_client.create_bucket(Bucket=bucket_name)
-
-        # Act
-        book_url = s3_uploader.book_url(identifier, open_access=open_access)
-        s3_uploader.mirror_one(representation, book_url)
-
-        # Assert
-        response = fixture.minio_s3_client.list_objects(Bucket=bucket_name)
-        assert "Contents" in response
-        assert len(response["Contents"]) == 1
-
-        [object] = response["Contents"]
-
-        assert object["Key"] == f"ISBN/{book_title}.epub"
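With `tests/core/test_s3.py` deleted, multipart behavior is now covered through `core.service.storage.s3` instead of `MultipartS3Upload`. As a rough sketch (not part of this diff) of how a caller drives the replacement API, assuming `S3Service.multipart()` and `upload_part()` behave the way the mocks in `tests/fixtures/s3.py` below model them, and with the key and content type purely illustrative:

```python
from typing import Iterable

from core.service.storage.s3 import S3Service


def upload_in_parts(storage: S3Service, key: str, chunks: Iterable[bytes]) -> None:
    # Exiting the context manager without an exception is what finalizes
    # the multipart upload, mirroring the old MultipartS3Upload.complete().
    with storage.multipart(key, content_type="application/marc") as upload:
        for chunk in chunks:
            upload.upload_part(chunk)
```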
diff --git a/tests/core/test_s3_analytics_provider.py b/tests/core/test_s3_analytics_provider.py
index e07e0b45a7..9ed40e782a 100644
--- a/tests/core/test_s3_analytics_provider.py
+++ b/tests/core/test_s3_analytics_provider.py
@@ -1,6 +1,9 @@
+from __future__ import annotations
+
 import datetime
 import json
-from unittest.mock import create_autospec, patch
+from typing import TYPE_CHECKING
+from unittest.mock import MagicMock
 
 import pytest
 
@@ -11,31 +14,38 @@
     CirculationEvent,
     DataSource,
     ExternalIntegration,
-    ExternalIntegrationLink,
     MediaTypes,
     create,
 )
-from core.s3 import S3Uploader, S3UploaderConfiguration
-from tests.fixtures.database import DatabaseTransactionFixture
+
+if TYPE_CHECKING:
+    from tests.fixtures.database import DatabaseTransactionFixture
+    from tests.fixtures.services import MockServicesFixture
 
 
 class S3AnalyticsFixture:
-    def __init__(self, db: DatabaseTransactionFixture) -> None:
+    def __init__(
+        self, db: DatabaseTransactionFixture, services_fixture: MockServicesFixture
+    ) -> None:
         self.db = db
-        self._analytics_integration, _ = create(
+        self.analytics_integration, _ = create(
             db.session,
             ExternalIntegration,
             goal=ExternalIntegration.ANALYTICS_GOAL,
             protocol=S3AnalyticsProvider.__module__,
         )
-        self._analytics_provider = S3AnalyticsProvider(
-            self._analytics_integration, db.default_library()
+        self.services = services_fixture.services
+        self.analytics_storage = services_fixture.storage.analytics
+        self.analytics_provider = S3AnalyticsProvider(
+            self.analytics_integration, self.services, db.default_library()
         )
 
 
 @pytest.fixture(scope="function")
-def s3_analytics_fixture(db: DatabaseTransactionFixture):
-    return S3AnalyticsFixture(db)
+def s3_analytics_fixture(
+    db: DatabaseTransactionFixture, mock_services_fixture: MockServicesFixture
+):
+    return S3AnalyticsFixture(db, mock_services_fixture)
 
 
 class TestS3AnalyticsProvider:
@@ -51,63 +61,22 @@ def timestamp_to_string(timestamp):
         """
         return str(timestamp)
 
-    def test_exception_is_raised_when_there_is_no_external_integration_link(
-        self, s3_analytics_fixture: S3AnalyticsFixture
-    ):
-        # Act, Assert
-        with pytest.raises(CannotLoadConfiguration):
-            s3_analytics_fixture._analytics_provider.collect_event(
-                s3_analytics_fixture.db.default_library(),
-                None,
-                CirculationEvent.NEW_PATRON,
-                datetime.datetime.utcnow(),
-            )
-
-    def test_exception_is_raised_when_there_is_no_storage_integration(
-        self, s3_analytics_fixture: S3AnalyticsFixture
-    ):
-        # Arrange
-        # Create an external integration link but don't create a storage integration
-        create(
-            s3_analytics_fixture.db.session,
-            ExternalIntegrationLink,
-            external_integration_id=s3_analytics_fixture._analytics_integration.id,
-            purpose=ExternalIntegrationLink.ANALYTICS,
-        )
-
-        # Act, Assert
-        with pytest.raises(CannotLoadConfiguration):
-            s3_analytics_fixture._analytics_provider.collect_event(
-                s3_analytics_fixture.db.default_library(),
-                None,
-                CirculationEvent.NEW_PATRON,
-                datetime.datetime.utcnow(),
-            )
-
-    def test_exception_is_raised_when_there_is_no_analytics_bucket(
+    def test_exception_is_raised_when_no_analytics_bucket_configured(
         self, s3_analytics_fixture: S3AnalyticsFixture
     ):
-        # Arrange
-        # Create a storage service
-        storage_integration, _ = create(
-            s3_analytics_fixture.db.session,
-            ExternalIntegration,
-            goal=ExternalIntegration.STORAGE_GOAL,
-            protocol=ExternalIntegration.S3,
-        )
+        # The services container returns None when there is no analytics storage service configured,
+        # so we override the analytics storage service with None to simulate this situation.
+        s3_analytics_fixture.services.storage.analytics.override(None)
 
-        # Create an external integration link to the storage service
-        create(
-            s3_analytics_fixture.db.session,
-            ExternalIntegrationLink,
-            external_integration_id=s3_analytics_fixture._analytics_integration.id,
-            other_integration_id=storage_integration.id,
-            purpose=ExternalIntegrationLink.ANALYTICS,
+        provider = S3AnalyticsProvider(
+            s3_analytics_fixture.analytics_integration,
+            s3_analytics_fixture.services,
+            s3_analytics_fixture.db.default_library(),
         )
 
         # Act, Assert
         with pytest.raises(CannotLoadConfiguration):
-            s3_analytics_fixture._analytics_provider.collect_event(
+            provider.collect_event(
                 s3_analytics_fixture.db.default_library(),
                 None,
                 CirculationEvent.NEW_PATRON,
@@ -117,189 +86,116 @@ def test_exception_is_raised_when_there_is_no_analytics_bucket(
     def test_analytics_data_without_associated_license_pool_is_correctly_stored_in_s3(
         self, s3_analytics_fixture: S3AnalyticsFixture
     ):
-        # Arrange
-        # Create an S3 Analytics integration
-        analytics_integration, _ = create(
-            s3_analytics_fixture.db.session,
-            ExternalIntegration,
-            goal=ExternalIntegration.ANALYTICS_GOAL,
-            protocol=S3AnalyticsProvider.__module__,
-        )
-        # Create an S3 Analytics provider
-        provider = S3AnalyticsProvider(
-            analytics_integration, s3_analytics_fixture.db.default_library()
-        )
+        # Set up event's metadata
+        event_time = datetime.datetime.utcnow()
+        event_time_formatted = self.timestamp_to_string(event_time)
+        event_type = CirculationEvent.NEW_PATRON
 
-        # Create an S3 storage service
-        storage_integration, _ = create(
-            s3_analytics_fixture.db.session,
-            ExternalIntegration,
-            goal=ExternalIntegration.STORAGE_GOAL,
-            protocol=ExternalIntegration.S3,
-        )
-        # Set up a bucket name used for storing analytics data
-        storage_integration.setting(
-            S3UploaderConfiguration.ANALYTICS_BUCKET_KEY
-        ).value = "analytics"
+        s3_analytics_fixture.analytics_provider._get_file_key = MagicMock()
 
-        # Create a link to the S3 storage service
-        create(
-            s3_analytics_fixture.db.session,
-            ExternalIntegrationLink,
-            external_integration_id=analytics_integration.id,
-            other_integration_id=storage_integration.id,
-            purpose=ExternalIntegrationLink.ANALYTICS,
+        # Act
+        s3_analytics_fixture.analytics_provider.collect_event(
+            s3_analytics_fixture.db.default_library(), None, event_type, event_time
         )
 
-        # Set up a mock instead of real S3Uploader class acting as the S3 storage service
-        s3_uploader = create_autospec(spec=S3Uploader)
-
-        with patch("core.mirror.MirrorUploader.implementation") as mock_implementation:
-            mock_implementation.return_value = s3_uploader
-            # Set up event's metadata
-            event_time = datetime.datetime.utcnow()
-            event_time_formatted = self.timestamp_to_string(event_time)
-            event_type = CirculationEvent.NEW_PATRON
-
-            # Act
-            provider.collect_event(
-                s3_analytics_fixture.db.default_library(), None, event_type, event_time
-            )
-
-            # Assert
-            s3_uploader.analytics_file_url.assert_called_once_with(
-                s3_analytics_fixture.db.default_library(), None, event_type, event_time
-            )
-            s3_uploader.mirror_one.assert_called_once()
-            representation, _ = s3_uploader.mirror_one.call_args[0]
-
-            assert MediaTypes.APPLICATION_JSON_MEDIA_TYPE == representation.media_type
-
-            content = representation.content
-            event = json.loads(content)
-
-            assert event_type == event["type"]
-            assert event_time_formatted == event["start"]
-            assert event_time_formatted == event["end"]
-            assert s3_analytics_fixture.db.default_library().id == event["library_id"]
+        # Assert
+        s3_analytics_fixture.analytics_provider._get_file_key.assert_called_once_with(
+            s3_analytics_fixture.db.default_library(), None, event_type, event_time
+        )
+        s3_analytics_fixture.analytics_storage.store.assert_called_once()
+        (
+            key,
+            content,
+            content_type,
+        ) = s3_analytics_fixture.analytics_storage.store.call_args.args
+
+        assert content_type == MediaTypes.APPLICATION_JSON_MEDIA_TYPE
+        assert key == s3_analytics_fixture.analytics_provider._get_file_key.return_value
+        event = json.loads(content)
+
+        assert event["type"] == event_type
+        assert event["start"] == event_time_formatted
+        assert event["end"] == event_time_formatted
+        assert event["library_id"] == s3_analytics_fixture.db.default_library().id
 
     def test_analytics_data_with_associated_license_pool_is_correctly_stored_in_s3(
         self, s3_analytics_fixture: S3AnalyticsFixture
     ):
-        # Arrange
-        # Create an S3 Analytics integration
-        analytics_integration, _ = create(
-            s3_analytics_fixture.db.session,
-            ExternalIntegration,
-            goal=ExternalIntegration.ANALYTICS_GOAL,
-            protocol=S3AnalyticsProvider.__module__,
+        # Create a test book
+        work = s3_analytics_fixture.db.work(
+            data_source_name=DataSource.GUTENBERG,
+            title="Test Book",
+            authors=("Test Author 1", "Test Author 2"),
+            genre="Test Genre",
+            language="eng",
+            audience=Classifier.AUDIENCE_ADULT,
+            with_license_pool=True,
        )
-        # Create an S3 Analytics provider
-        provider = S3AnalyticsProvider(
-            analytics_integration, s3_analytics_fixture.db.default_library()
+        license_pool = work.license_pools[0]
+        edition = work.presentation_edition
+
+        # Set up event's metadata
+        event_time = datetime.datetime.utcnow()
+        event_time_formatted = self.timestamp_to_string(event_time)
+        event_type = CirculationEvent.CM_CHECKOUT
+
+        s3_analytics_fixture.analytics_provider._get_file_key = MagicMock()
+
+        # Act
+        s3_analytics_fixture.analytics_provider.collect_event(
+            s3_analytics_fixture.db.default_library(),
+            license_pool,
+            event_type,
+            event_time,
         )
 
-        # Create an S3 storage service
-        storage_integration, _ = create(
-            s3_analytics_fixture.db.session,
-            ExternalIntegration,
-            goal=ExternalIntegration.STORAGE_GOAL,
-            protocol=ExternalIntegration.S3,
+        # Assert
+        s3_analytics_fixture.analytics_storage.store.assert_called_once()
+        (
+            key,
+            content,
+            content_type,
+        ) = s3_analytics_fixture.analytics_storage.store.call_args.args
+
+        assert content_type == MediaTypes.APPLICATION_JSON_MEDIA_TYPE
+        assert key == s3_analytics_fixture.analytics_provider._get_file_key.return_value
+
+        event = json.loads(content)
+        data_source = license_pool.data_source if license_pool else None
+        identifier = license_pool.identifier if license_pool else None
+        collection = license_pool.collection if license_pool else None
+        work = license_pool.work if license_pool else None
+
+        assert event["type"] == event_type
+        assert event["start"] == event_time_formatted
+        assert event["end"] == event_time_formatted
+        assert event["library_id"] == s3_analytics_fixture.db.default_library().id
+        assert event["license_pool_id"] == license_pool.id
+        assert event["publisher"] == edition.publisher
+        assert event["imprint"] == edition.imprint
+        assert event["issued"] == edition.issued
+        assert event["published"] == edition.published
+        assert event["medium"] == edition.medium
+        assert event["collection"] == collection.name
+        assert event["identifier_type"] == identifier.type
+        assert event["identifier"] == identifier.identifier
+        assert event["data_source"] == data_source.name
+        assert event["audience"] == work.audience
+        assert event["fiction"] == work.fiction
+        assert event["summary_text"] == work.summary_text
+        assert event["quality"] == work.quality
+        assert event["rating"] == work.rating
+        assert event["popularity"] == work.popularity
+        assert event["genre"] == work.genres[0].name
+        assert event["availability_time"] == self.timestamp_to_string(
+            license_pool.availability_time
         )
-        # Set up a bucket name used for storing analytics data
-        storage_integration.setting(
-            S3UploaderConfiguration.ANALYTICS_BUCKET_KEY
-        ).value = "analytics"
-
-        # Create a link to the S3 storage service
-        create(
-            s3_analytics_fixture.db.session,
-            ExternalIntegrationLink,
-            external_integration_id=analytics_integration.id,
-            other_integration_id=storage_integration.id,
-            purpose=ExternalIntegrationLink.ANALYTICS,
-        )
-
-        # Set up a mock instead of real S3Uploader class acting as the S3 storage service
-        s3_uploader = create_autospec(spec=S3Uploader)
-        with patch("core.mirror.MirrorUploader.implementation") as mock_implementation:
-            mock_implementation.return_value = s3_uploader
-            # Create a test book
-            work = s3_analytics_fixture.db.work(
-                data_source_name=DataSource.GUTENBERG,
-                title="Test Book",
-                authors=("Test Author 1", "Test Author 2"),
-                genre="Test Genre",
-                language="eng",
-                audience=Classifier.AUDIENCE_ADULT,
-                with_license_pool=True,
-            )
-            license_pool = work.license_pools[0]
-            edition = work.presentation_edition
-
-            # Set up event's metadata
-            event_time = datetime.datetime.utcnow()
-            event_time_formatted = self.timestamp_to_string(event_time)
-            event_type = CirculationEvent.CM_CHECKOUT
-
-            # Act
-            provider.collect_event(
-                s3_analytics_fixture.db.default_library(),
-                license_pool,
-                event_type,
-                event_time,
-            )
-
-            # Assert
-            s3_uploader.analytics_file_url.assert_called_once_with(
-                s3_analytics_fixture.db.default_library(),
-                license_pool,
-                event_type,
-                event_time,
-            )
-            s3_uploader.mirror_one.assert_called_once()
-            representation, _ = s3_uploader.mirror_one.call_args[0]
-
-            assert MediaTypes.APPLICATION_JSON_MEDIA_TYPE == representation.media_type
-
-            content = representation.content
-            event = json.loads(content)
-            data_source = license_pool.data_source if license_pool else None
-            identifier = license_pool.identifier if license_pool else None
-            collection = license_pool.collection if license_pool else None
-            work = license_pool.work if license_pool else None
-
-            assert event_type == event["type"]
-            assert event_time_formatted == event["start"]
-            assert event_time_formatted == event["end"]
-            assert s3_analytics_fixture.db.default_library().id == event["library_id"]
-            assert license_pool.id == event["license_pool_id"]
-            assert edition.publisher == event["publisher"]
-            assert edition.imprint == event["imprint"]
-            assert edition.issued == event["issued"]
-            assert edition.published == event["published"]
-            assert edition.medium == event["medium"]
-            assert collection.name == event["collection"]
-            assert identifier.type == event["identifier_type"]
-            assert identifier.identifier == event["identifier"]
-            assert data_source.name == event["data_source"]
-            assert work.audience == event["audience"]
-            assert work.fiction == event["fiction"]
-            assert work.summary_text == event["summary_text"]
-            assert work.quality == event["quality"]
-            assert work.rating == event["rating"]
-            assert work.popularity == event["popularity"]
-            assert work.genres[0].name == event["genre"]
-            assert (
-                self.timestamp_to_string(license_pool.availability_time)
-                == event["availability_time"]
-            )
-            assert license_pool.licenses_owned == event["licenses_owned"]
-            assert license_pool.licenses_available == event["licenses_available"]
-            assert license_pool.licenses_reserved == event["licenses_reserved"]
-            assert license_pool.patrons_in_hold_queue == event["patrons_in_hold_queue"]
-            assert False == event["self_hosted"]
-            assert work.title == event["title"]
-            assert work.series == event["series"]
-            assert work.series_position == event["series_position"]
-            assert work.language == event["language"]
+        assert event["licenses_owned"] == license_pool.licenses_owned
+        assert event["licenses_available"] == license_pool.licenses_available
+        assert event["licenses_reserved"] == license_pool.licenses_reserved
+        assert event["patrons_in_hold_queue"] == license_pool.patrons_in_hold_queue
+        assert event["self_hosted"] is False
+        assert event["title"] == work.title
+        assert event["series"] == work.series
+        assert event["series_position"] == work.series_position
+        assert event["language"] == work.language
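The rewritten assertions above pin down the provider's new flow: compute a key with `_get_file_key`, serialize the event dictionary to JSON, and hand it to the analytics `S3Service` via `store(key, content, content_type)`. A sketch of that implied shape follows; the real logic lives in `api/s3_analytics_provider.py`, and the function name, parameters, and event fields here are illustrative stand-ins:

```python
import json

from core.model import MediaTypes


def store_event_sketch(provider, storage, library, event_type, event_time):
    # Key naming is delegated to the provider's _get_file_key helper,
    # which the tests above replace with a MagicMock.
    key = provider._get_file_key(library, None, event_type, event_time)
    event = {"type": event_type, "start": str(event_time), "end": str(event_time)}
    storage.store(key, json.dumps(event), MediaTypes.APPLICATION_JSON_MEDIA_TYPE)
```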
diff --git a/tests/fixtures/s3.py b/tests/fixtures/s3.py
index fb3dc23229..7f802b0382 100644
--- a/tests/fixtures/s3.py
+++ b/tests/fixtures/s3.py
@@ -1,197 +1,131 @@
+from __future__ import annotations
+
 import functools
-import os
-from typing import Any, Iterable
-from urllib.parse import urlsplit
+import sys
+from types import TracebackType
+from typing import BinaryIO, List, Literal, NamedTuple, Optional, Protocol, Type
+from unittest.mock import MagicMock
 
-import boto3
 import pytest
+from mypy_boto3_s3 import S3Client
 
-from core.model import ExternalIntegration
-from core.s3 import S3Uploader, S3UploaderConfiguration
-from tests.fixtures.database import DatabaseTransactionFixture
-
-
-class S3UploaderFixture:
-    transaction: DatabaseTransactionFixture
-
-    def __init__(self, transaction: DatabaseTransactionFixture):
-        self.transaction = transaction
-
-    def integration(self, **settings):
-        """Create and configure a simple S3 integration."""
-        integration = self.transaction.external_integration(
-            ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings
-        )
-        integration.username = settings.get("username", "username")
-        integration.password = settings.get("password", "password")
-        return integration
-
-    @staticmethod
-    def add_settings_value(settings, key, value):
-        """Adds a value to settings dictionary
-
-        :param settings: Settings dictionary
-        :type settings: Dict
-
-        :param key: Key
-        :type key: string
 
+from core.service.storage.s3 import MultipartS3ContextManager, S3Service
 
-        :param value: Value
-        :type value: Any
+if sys.version_info >= (3, 11):
+    from typing import Self
+else:
+    from typing_extensions import Self
 
-        :return: Updated settings dictionary
-        :rtype: Dict
-        """
-        if value:
-            if settings:
-                settings[key] = value
-            else:
-                settings = {key: value}
 
+class MockS3ServiceUpload(NamedTuple):
+    key: str
+    content: bytes
+    media_type: Optional[str]
 
-        return settings
 
-    def create_s3_uploader(
+class MockMultipartS3ContextManager(MultipartS3ContextManager):
+    def __init__(
         self,
-        client_class=None,
-        uploader_class=None,
-        region=None,
-        addressing_style=None,
-        **settings,
-    ):
-        """Creates a new instance of S3 uploader
-
-        :param client_class: (Optional) Custom class to be used instead of boto3's client class
-        :type client_class: Optional[Type]
-
-        :param: uploader_class: (Optional) Custom class which will be used instead of S3Uploader
-        :type uploader_class: Optional[Type]
-
-        :param region: (Optional) S3 region
-        :type region: Optional[string]
-
-        :param addressing_style: (Optional) S3 addressing style
-        :type addressing_style: Optional[string]
-
-        :param settings: Kwargs used for initializing an external integration
-        :type: Optional[Dict]
-
-        :return: New instance of S3 uploader
-        :rtype: S3Uploader
-        """
-        settings = self.add_settings_value(
-            settings, S3UploaderConfiguration.S3_REGION, region
-        )
-        settings = self.add_settings_value(
-            settings, S3UploaderConfiguration.S3_ADDRESSING_STYLE, addressing_style
-        )
-        integration = self.integration(**settings)
-        uploader_class = uploader_class or S3Uploader
+        parent: MockS3Service,
+        bucket: str,
+        key: str,
+        url: str,
+        media_type: Optional[str] = None,
+    ) -> None:
+        self.parent = parent
+        self.key = key
+        self.bucket = bucket
+        self.media_type = media_type
+        self.content = b""
+        self._complete = False
+        self._url = url
+        self._exception = None
+
+    def __enter__(self) -> Self:
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Literal[False]:
+        if self.content:
+            self._complete = True
+            self.parent.uploads.append(
+                MockS3ServiceUpload(self.key, self.content, self.media_type)
+            )
+        return False
 
-        return uploader_class(integration, client_class=client_class)
+    def upload_part(self, content: bytes) -> None:
+        self.content += content
 
 
-@pytest.fixture
-def s3_uploader_fixture(
-    db,
-) -> S3UploaderFixture:
-    return S3UploaderFixture(db)
-
-
-class S3UploaderIntegrationFixture(S3UploaderFixture):
-    SIMPLIFIED_TEST_MINIO_ENDPOINT_URL = os.environ.get(
-        "SIMPLIFIED_TEST_MINIO_ENDPOINT_URL", "http://localhost:9000"
-    )
-    SIMPLIFIED_TEST_MINIO_USER = os.environ.get(
-        "SIMPLIFIED_TEST_MINIO_USER", "minioadmin"
-    )
-    SIMPLIFIED_TEST_MINIO_PASSWORD = os.environ.get(
-        "SIMPLIFIED_TEST_MINIO_PASSWORD", "minioadmin"
-    )
-    _, SIMPLIFIED_TEST_MINIO_HOST, _, _, _ = urlsplit(
-        SIMPLIFIED_TEST_MINIO_ENDPOINT_URL
-    )
-
-    minio_s3_client: Any
-    """boto3 client connected to locally running MinIO instance"""
-
-    s3_client_class = None
-    """Factory function used for creating a boto3 client inside S3Uploader"""
-
-    def __init__(self, transaction: DatabaseTransactionFixture):
-        super().__init__(transaction)
-        self.minio_s3_client = boto3.client(
-            "s3",
-            aws_access_key_id=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_USER,
-            aws_secret_access_key=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_PASSWORD,
-            endpoint_url=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL,
-        )
-        self.s3_client_class = functools.partial(
-            boto3.client,
-            endpoint_url=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL,
+class MockS3Service(S3Service):
+    def __init__(
+        self,
+        client: S3Client,
+        region: str,
+        bucket: str,
+        url_template: str,
+    ) -> None:
+        super().__init__(client, region, bucket, url_template)
+        self.uploads: List[MockS3ServiceUpload] = []
+
+    def store_stream(
+        self,
+        key: str,
+        stream: BinaryIO,
+        content_type: Optional[str] = None,
+    ) -> Optional[str]:
+        self.uploads.append(MockS3ServiceUpload(key, stream.read(), content_type))
+        return self.generate_url(key)
+
+    def multipart(
+        self, key: str, content_type: Optional[str] = None
+    ) -> MultipartS3ContextManager:
+        return MockMultipartS3ContextManager(
+            self, self.bucket, key, self.generate_url(key), content_type
         )
 
-    def close(self):
-        response = self.minio_s3_client.list_buckets()
-
-        for bucket in response["Buckets"]:
-            bucket_name = bucket["Name"]
-            response = self.minio_s3_client.list_objects(Bucket=bucket_name)
-            for object in response.get("Contents", []):
-                object_key = object["Key"]
-                self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key)
-
-            self.minio_s3_client.delete_bucket(Bucket=bucket_name)
 
-    def create_s3_uploader(
+class S3ServiceProtocol(Protocol):
+    def __call__(
         self,
-        client_class=None,
-        uploader_class=None,
-        region=None,
-        addressing_style=None,
-        **settings,
-    ):
-        """Creates a new instance of S3 uploader
-
-        :param client_class: (Optional) Custom class to be used instead of boto3's client class
-        :type client_class: Optional[Type]
-
-        :param: uploader_class: (Optional) Custom class which will be used instead of S3Uploader
-        :type uploader_class: Optional[Type]
-
-        :param region: (Optional) S3 region
-        :type region: Optional[string]
-
-        :param addressing_style: (Optional) S3 addressing style
-        :type addressing_style: Optional[string]
-
-        :param settings: Kwargs used for initializing an external integration
-        :type: Optional[Dict]
-
-        :return: New instance of S3 uploader
-        :rtype: S3Uploader
-        """
-        if settings and "username" not in settings:
-            self.add_settings_value(
-                settings, "username", self.SIMPLIFIED_TEST_MINIO_USER
-            )
-        if settings and "password" not in settings:
-            self.add_settings_value(
-                settings, "password", self.SIMPLIFIED_TEST_MINIO_PASSWORD
-            )
-        if not client_class:
-            client_class = self.s3_client_class
+        client: Optional[S3Client] = None,
+        region: Optional[str] = None,
+        bucket: Optional[str] = None,
+        url_template: Optional[str] = None,
+    ) -> S3Service:
+        ...
+
+
+class S3ServiceFixture:
+    def __init__(self):
+        self.mock_s3_client = MagicMock()
+        self.region = "region"
+        self.url_template = "https://{region}.test.com/{bucket}/{key}"
+        self.bucket = "bucket"
+
+    @property
+    def service(self) -> S3ServiceProtocol:
+        return functools.partial(
+            S3Service,
+            client=self.mock_s3_client,
+            region=self.region,
+            bucket=self.bucket,
+            url_template=self.url_template,
+        )
 
-        return super().create_s3_uploader(
-            client_class, uploader_class, region, addressing_style, **settings
+    def mock_service(self) -> MockS3Service:
+        return MockS3Service(
+            client=self.mock_s3_client,
+            region=self.region,
+            bucket=self.bucket,
+            url_template=self.url_template,
        )
 
 
 @pytest.fixture
-def s3_uploader_integration_fixture(
-    db,
-) -> Iterable[S3UploaderIntegrationFixture]:
-    fixture = S3UploaderIntegrationFixture(db)
-    yield fixture
-    fixture.close()
+def s3_service_fixture() -> S3ServiceFixture:
+    return S3ServiceFixture()
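A consumer of the new fixture might look like the following sketch (not part of this diff); the key, payload, and expected URL are illustrative, with the URL derived from the fixture's `https://{region}.test.com/{bucket}/{key}` template on the assumption that `S3Service.generate_url()` fills that template in:

```python
from io import BytesIO


def test_store_stream_records_upload(s3_service_fixture: S3ServiceFixture) -> None:
    service = s3_service_fixture.mock_service()
    url = service.store_stream("covers/cover.png", BytesIO(b"\x89PNG"), "image/png")

    # MockS3Service records every upload instead of talking to boto3.
    [upload] = service.uploads
    assert upload.key == "covers/cover.png"
    assert upload.content == b"\x89PNG"
    assert upload.media_type == "image/png"
    assert url == "https://region.test.com/bucket/covers/cover.png"
```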
diff --git a/tests/fixtures/services.py b/tests/fixtures/services.py
new file mode 100644
index 0000000000..edbeccb524
--- /dev/null
+++ b/tests/fixtures/services.py
@@ -0,0 +1,42 @@
+from unittest.mock import MagicMock
+
+import pytest
+
+from core.service.container import Services
+from core.service.storage.container import Storage
+from core.service.storage.s3 import S3Service
+
+
+class MockStorageFixture:
+    def __init__(self):
+        self.storage = Storage()
+        self.analytics = MagicMock(spec=S3Service)
+        self.storage.analytics.override(self.analytics)
+        self.public = MagicMock(spec=S3Service)
+        self.storage.public.override(self.public)
+        self.s3_client = MagicMock()
+        self.storage.s3_client.override(self.s3_client)
+
+
+@pytest.fixture
+def mock_storage_fixture() -> MockStorageFixture:
+    return MockStorageFixture()
+
+
+class MockServicesFixture:
+    """
+    Provide a services container with all the services mocked out
+    by MagicMock objects.
+    """
+
+    def __init__(self, storage: MockStorageFixture):
+        self.services = Services()
+        self.services.storage.override(storage.storage)
+        self.storage = storage
+
+
+@pytest.fixture
+def mock_services_fixture(
+    mock_storage_fixture: MockStorageFixture,
+) -> MockServicesFixture:
+    return MockServicesFixture(mock_storage_fixture)
diff --git a/tox.ini b/tox.ini
index 1721430ba6..34ffa9983f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,14 +11,15 @@ commands =
     core: pytest {posargs:tests/core}
 passenv =
     SIMPLIFIED_*
+    PALACE_*
     CI
 setenv =
     {api,core}: COVERAGE_FILE = .coverage.{envname}
     docker: SIMPLIFIED_TEST_DATABASE=postgresql://simplified_test:test@localhost:9005/simplified_circulation_test
     docker: SIMPLIFIED_TEST_OPENSEARCH=http://localhost:9007
-    core-docker: SIMPLIFIED_TEST_MINIO_ENDPOINT_URL=http://localhost:9004
-    core-docker: SIMPLIFIED_TEST_MINIO_USER=simplified
-    core-docker: SIMPLIFIED_TEST_MINIO_PASSWORD=12345678901234567890
+    core-docker: PALACE_TEST_MINIO_ENDPOINT_URL=http://localhost:9004
+    core-docker: PALACE_TEST_MINIO_USER=palace
+    core-docker: PALACE_TEST_MINIO_PASSWORD=12345678901234567890
 docker =
     docker: os-circ
     docker: db-circ
@@ -68,7 +69,7 @@ ports =
 [docker:minio-circ]
 image = bitnami/minio:2022.3.3
 environment =
-    MINIO_ROOT_USER=simplified
+    MINIO_ROOT_USER=palace
     MINIO_ROOT_PASSWORD=12345678901234567890
 ports =
     9004:9000/tcp
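The `core-docker` factor now provisions MinIO under the `PALACE_` prefix rather than `SIMPLIFIED_`. Outside of tox, a local session could mirror that setup before running the MinIO-backed tests; a sketch, with the values copied from the `setenv` block above (adjust if your MinIO runs elsewhere):

```python
import os

# Matches the tox core-docker settings for the bitnami/minio container.
os.environ.setdefault("PALACE_TEST_MINIO_ENDPOINT_URL", "http://localhost:9004")
os.environ.setdefault("PALACE_TEST_MINIO_USER", "palace")
os.environ.setdefault("PALACE_TEST_MINIO_PASSWORD", "12345678901234567890")
```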