From 9488ba1b05a974b01eb7c4745d28c428edccd3d1 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 1 May 2024 15:39:20 -0300 Subject: [PATCH] Run tests using pytest-xdist to speed up our test runs. --- docker-compose.yml | 2 +- poetry.lock | 37 ++- pyproject.toml | 8 +- src/palace/manager/core/config.py | 35 +-- src/palace/manager/scripts.py | 5 +- src/palace/manager/sqlalchemy/session.py | 15 +- tests/fixtures/api_controller.py | 26 +- tests/fixtures/database.py | 257 +++++++++++++----- tests/fixtures/search.py | 85 +++--- tests/fixtures/webserver.py | 6 +- tests/manager/api/controller/test_profile.py | 2 +- .../api/controller/test_scopedsession.py | 193 +++++-------- tests/manager/api/test_app.py | 11 +- tests/manager/api/test_overdrive.py | 13 - tests/manager/api/test_scripts.py | 2 +- tests/manager/search/test_external_search.py | 76 +++--- tests/manager/search/test_migration_states.py | 18 +- tests/manager/search/test_service.py | 70 ++--- tests/migration/conftest.py | 37 +-- tests/migration/test_instance_init_script.py | 100 +++++-- tox.ini | 6 +- 21 files changed, 540 insertions(+), 464 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index fecf154f06..143fc3d351 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -22,7 +22,7 @@ x-cm-variables: &cm PALACE_CELERY_CLOUDWATCH_STATISTICS_DRYRUN: "true" # Set up the environment variables used for testing as well - SIMPLIFIED_TEST_DATABASE: "postgresql://palace:test@pg:5432/circ" + PALACE_TEST_DATABASE_URL: "postgresql://palace:test@pg:5432/circ" PALACE_TEST_SEARCH_URL: "http://os:9200" PALACE_TEST_MINIO_ENDPOINT_URL: "http://minio:9000" PALACE_TEST_MINIO_USER: "palace" diff --git a/poetry.lock b/poetry.lock index 6cd5f8141f..e6d04dd562 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1235,6 +1235,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "execnet" +version = "2.1.1" +description = "execnet: rapid multi-Python deployment" +optional = false 
+python-versions = ">=3.8" +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + [[package]] name = "expiringdict" version = "1.2.2" @@ -2265,6 +2279,7 @@ files = [ {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, @@ -3589,6 +3604,26 @@ files = [ [package.dependencies] pytest = ">=7.0.0" +[[package]] +name = "pytest-xdist" +version = "3.6.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = 
"sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, +] + +[package.dependencies] +execnet = ">=2.1" +pytest = ">=7.0.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -4906,4 +4941,4 @@ lxml = ">=3.8" [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "ef03f72fe652d1b7e53071457a4023a6e97147cff88aeca4919e251e0e4a5fb2" +content-hash = "f170388105405aa1f69a194f5c5e77b095d657e423eab88f3b22ab0a530f8fa8" diff --git a/pyproject.toml b/pyproject.toml index 30c94af89e..a593f4dde0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,11 +9,14 @@ exclude_also = [ '^\s*pass\s*$', '^\s*raise NotImplementedError\s*$', ] +include_namespace_packages = true [tool.coverage.run] branch = true +concurrency = ["multiprocessing", "thread"] +parallel = true relative_files = true -source = ["palace.manager"] +source = ["src"] [tool.isort] known_first_party = ["palace"] @@ -286,6 +289,7 @@ pytest-alembic = "^0.11.0" pytest-celery = "^0.0.0" pytest-cov = "^5.0.0" pytest-timeout = "*" +pytest-xdist = "^3.5.0" requests-mock = "1.12.1" types-aws-xray-sdk = "^2.11.0.13" types-Flask-Cors = "^4.0.0" @@ -306,6 +310,8 @@ psycopg2 = "~2.9.5" addopts = [ "--cov", "--cov-report=xml", + "--dist=load", + "--numprocesses=auto", "--strict-markers", ] markers = [ diff --git a/src/palace/manager/core/config.py b/src/palace/manager/core/config.py index 4efa28f483..7c4228b4f3 100644 --- a/src/palace/manager/core/config.py +++ b/src/palace/manager/core/config.py @@ -41,7 +41,6 @@ class Configuration(ConfigurationConstants): log = logging.getLogger("Configuration file loader") # Environment variables that contain URLs to the database - DATABASE_TEST_ENVIRONMENT_VARIABLE = "SIMPLIFIED_TEST_DATABASE" DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE = "SIMPLIFIED_PRODUCTION_DATABASE" # Environment variable for Overdrive fulfillment keys @@ -72,46 
+71,26 @@ class Configuration(ConfigurationConstants): @classmethod def database_url(cls): - """Find the database URL configured for this site. - - For compatibility with old configurations, we will look in the - site configuration first. - - If it's not there, we will look in the appropriate environment - variable. - """ - - # To avoid expensive mistakes, test and production databases - # are always configured with separate keys. The TESTING variable - # controls which database is used, and it's set by the - # package_setup() function called in every component's - # tests/__init__.py. - test = os.environ.get("TESTING", False) - if test: - environment_variable = cls.DATABASE_TEST_ENVIRONMENT_VARIABLE - else: - environment_variable = cls.DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE - - url = os.environ.get(environment_variable) + """Find the database URL configured for this site.""" + url = os.environ.get(cls.DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE) if not url: raise CannotLoadConfiguration( "Database URL was not defined in environment variable (%s)." - % environment_variable + % cls.DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE ) - url_obj = None try: url_obj = make_url(url) except ArgumentError as e: - # Improve the error message by giving a guide as to what's - # likely to work. + # Improve the error message by giving a guide as to what's likely to work. raise ArgumentError( "Bad format for database URL (%s). Expected something like postgresql://[username]:[password]@[hostname]:[port]/[database name]" % url ) - # Calling __to_string__ will hide the password. 
- logging.info("Connecting to database: %s" % url_obj.__to_string__()) + logging.info( + "Connecting to database: %s" % url_obj.render_as_string(hide_password=True) + ) return url @classmethod diff --git a/src/palace/manager/scripts.py b/src/palace/manager/scripts.py index a171b0adf6..a4682d55c9 100644 --- a/src/palace/manager/scripts.py +++ b/src/palace/manager/scripts.py @@ -486,7 +486,10 @@ class InstanceInitializationScript: remain idempotent. """ - def __init__(self, config_file: Path | None = None) -> None: + def __init__( + self, + config_file: Path | None = None, + ) -> None: self._log: logging.Logger | None = None self._container = container_instance() diff --git a/src/palace/manager/sqlalchemy/session.py b/src/palace/manager/sqlalchemy/session.py index 9c5f1f003d..89a8b92799 100644 --- a/src/palace/manager/sqlalchemy/session.py +++ b/src/palace/manager/sqlalchemy/session.py @@ -2,7 +2,6 @@ import json import logging -import os from typing import Any from pydantic.json import pydantic_encoder @@ -62,18 +61,8 @@ def setup_event_listener( return session @classmethod - def sessionmaker(cls, url=None, session=None): - if not (url or session): - url = Configuration.database_url() - if url: - bind_obj = cls.engine(url) - elif session: - bind_obj = session.get_bind() - if not os.environ.get("TESTING"): - # If a factory is being created from a session in test mode, - # use the same Connection for all of the tests so objects can - # be accessed. Otherwise, bind against an Engine object. 
- bind_obj = bind_obj.engine + def sessionmaker(cls): + bind_obj = cls.engine() session_factory = sessionmaker(bind=bind_obj) cls.setup_event_listener(session_factory) return session_factory diff --git a/tests/fixtures/api_controller.py b/tests/fixtures/api_controller.py index 38e7542228..7b8c5de333 100644 --- a/tests/fixtures/api_controller.py +++ b/tests/fixtures/api_controller.py @@ -83,7 +83,6 @@ def __init__( self, db: DatabaseTransactionFixture, services_fixture: ServicesFixture, - setup_cm: bool, ): self.db = db self.app = app @@ -102,11 +101,10 @@ def __init__( self.search_index = ExternalSearchIndexFake() self.services_fixture.services.search.index.override(self.search_index) - if setup_cm: - # NOTE: Any reference to self._default_library below this - # point in this method will cause the tests in - # TestScopedSession to hang. - app.manager = self.circulation_manager_setup() + # NOTE: Any reference to self._default_library below this + # point in this method will cause the tests in + # TestScopedSession to hang. 
+ app.manager = self.circulation_manager_setup() @contextmanager def wired_container(self): @@ -259,19 +257,7 @@ def controller_fixture( db: DatabaseTransactionFixture, services_fixture: ServicesFixture ): time_then = datetime.datetime.now() - fixture = ControllerFixture(db, services_fixture, setup_cm=True) - time_now = datetime.datetime.now() - time_diff = time_now - time_then - logging.info("controller init took %s", time_diff) - yield fixture - - -@pytest.fixture(scope="function") -def controller_fixture_without_cm( - db: DatabaseTransactionFixture, services_fixture: ServicesFixture -): - time_then = datetime.datetime.now() - fixture = ControllerFixture(db, services_fixture, setup_cm=False) + fixture = ControllerFixture(db, services_fixture) time_now = datetime.datetime.now() time_diff = time_now - time_then logging.info("controller init took %s", time_diff) @@ -315,7 +301,7 @@ class CirculationControllerFixture(ControllerFixture): def __init__( self, db: DatabaseTransactionFixture, services_fixture: ServicesFixture ): - super().__init__(db, services_fixture, setup_cm=True) + super().__init__(db, services_fixture) self.works = [] self.add_works(self.BOOKS) diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index a363b6793e..a24dafcd32 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -2,30 +2,35 @@ import importlib import logging -import os import shutil import tempfile import time import uuid from collections.abc import Generator, Iterable from contextlib import contextmanager +from functools import cached_property from textwrap import dedent from typing import Any +from unittest.mock import patch import pytest import sqlalchemy from Crypto.PublicKey.RSA import import_key -from sqlalchemy import MetaData -from sqlalchemy.engine import Connection, Engine, Transaction +from pydantic import PostgresDsn +from sqlalchemy import MetaData, create_engine, text +from sqlalchemy.engine import Connection, Transaction, make_url 
from sqlalchemy.orm import Session, sessionmaker +from typing_extensions import Self from palace.manager.api.discovery.opds_registration import OpdsRegistrationService from palace.manager.core.classifier import Classifier from palace.manager.core.config import Configuration +from palace.manager.core.exceptions import BasePalaceException from palace.manager.core.opds_import import OPDSAPI from palace.manager.integration.configuration.library import LibrarySettings from palace.manager.integration.goals import Goals from palace.manager.integration.registry.discovery import DiscoveryRegistry +from palace.manager.service.configuration import ServiceConfiguration from palace.manager.sqlalchemy.constants import MediaTypes from palace.manager.sqlalchemy.model.classification import ( Classification, @@ -62,79 +67,197 @@ from palace.manager.util.datetime_helpers import utc_now -class ApplicationFixture: - """The ApplicationFixture is a representation of the state that must be set up in order to run the application for - testing.""" +class TestIdFixture: + """ + This fixture creates a unique per test run worker id. Each test worker will get its own + ID, so its suitable for initializing shared resources that need to be unique per test worker. + For example - database name, opensearch index name, etc. + """ - @classmethod - def drop_existing_schema(cls): - engine = SessionManager.engine() - metadata_obj = MetaData() - metadata_obj.reflect(bind=engine) - metadata_obj.drop_all(engine) - metadata_obj.clear() + def __init__(self, worker_id: str, prefix: str): + self.worker_id = worker_id + # This flag indicates that the tests are running in parallel mode. + self.parallel = worker_id != "master" + # We create a unique run id for each test run. The dashes are + # replaced with underscores to make it a valid identifier for + # in PostgreSQL. 
+ self.run_id = str(uuid.uuid4()).replace("-", "_") + self.id = f"{prefix}_{self.worker_id}_{self.run_id}" + + +@pytest.fixture(scope="session") +def test_id(worker_id: str) -> TestIdFixture: + return TestIdFixture(worker_id, "session") + + +@pytest.fixture(scope="function") +def test_id_func(worker_id: str) -> TestIdFixture: + return TestIdFixture(worker_id, "function") + + +class DatabaseTestConfiguration(ServiceConfiguration): + url: PostgresDsn + create_database: bool = True + + class Config: + env_prefix = "PALACE_TEST_DATABASE_" + + +class DatabaseNameFixture: + def __init__(self, test_id: TestIdFixture): + self.test_id = test_id + config = DatabaseTestConfiguration() + if not config.create_database and self.test_id.parallel: + raise BasePalaceException( + "Database creation is disabled, but tests are running in parallel mode. " + "This is not supported. Please enable database creation or run tests in serial mode." + ) + self.create_database = config.create_database + self.main_url = make_url(config.url) + + @cached_property + def database_name(self) -> str: + if not self.create_database: + return self.main_url.database + + return self.test_id.id + + @cached_property + def worker_url(self) -> str: + return str(self.main_url.set(database=self.database_name)) + + @contextmanager + def _db_connection(self) -> Generator[Connection, None, None]: + engine = create_engine(self.main_url, isolation_level="AUTOCOMMIT") + connection = engine.connect() + try: + yield connection + finally: + connection.close() + engine.dispose() + + def create(self) -> None: + if not self.create_database: + return + + with self._db_connection() as connection: + user = self.main_url.username + connection.execute(text(f"CREATE DATABASE {self.database_name}")) + connection.execute( + text(f"GRANT ALL PRIVILEGES ON DATABASE {self.database_name} TO {user}") + ) + + def drop(self) -> None: + if not self.create_database: + return + + with self._db_connection() as connection: + 
connection.execute(text(f"DROP DATABASE {self.database_name}")) @classmethod - def create(cls): - # This will make sure we always connect to the test database. - os.environ["TESTING"] = "true" + @contextmanager + def fixture(cls, test_id: TestIdFixture) -> Generator[Self, None, None]: + db_name_fixture = cls(test_id) + db_name_fixture.create() + try: + # Patch the database URL, so any code that uses it will use the worker specific database. + with patch.object( + Configuration, "database_url", return_value=db_name_fixture.worker_url + ): + yield db_name_fixture + finally: + db_name_fixture.drop() - # Drop any existing schema. It will be recreated when the database is initialized. - _cls = cls() - _cls.drop_existing_schema() - return _cls - def close(self): - if "TESTING" in os.environ: - del os.environ["TESTING"] +@pytest.fixture(scope="session") +def database_name(test_id: TestIdFixture) -> Generator[DatabaseNameFixture, None, None]: + with DatabaseNameFixture.fixture(test_id) as fixture: + yield fixture + + +@pytest.fixture(scope="function") +def database_name_func( + test_id_func: TestIdFixture, +) -> Generator[DatabaseNameFixture, None, None]: + with DatabaseNameFixture.fixture(test_id_func) as fixture: + if not fixture.create_database: + raise BasePalaceException( + "Cannot provide a function scoped database when database creation is disabled." + ) + yield fixture class DatabaseFixture: """The DatabaseFixture stores a reference to the database.""" - _engine: Engine - _connection: Connection + # We store a reference to SessionManager.engine so that we can patch it in tests + # and still access the function from the fixture. 
+ engine_func = SessionManager.engine - def __init__(self, engine: Engine, connection: Connection): - self._engine = engine - self._connection = connection + def __init__(self, database_name: DatabaseNameFixture) -> None: + self.database_name = database_name + self.engine = self.engine_func(url=self.database_name.worker_url) + self.connection = self.engine.connect() - @staticmethod - def _get_database_connection() -> tuple[Engine, Connection]: - url = Configuration.database_url() - engine = SessionManager.engine(url) - connection = engine.connect() - return engine, connection + def drop_existing_schema(self) -> None: + metadata_obj = MetaData() + metadata_obj.reflect(bind=self.engine) + metadata_obj.drop_all(self.engine) + metadata_obj.clear() - @staticmethod - def _initialize_database(connection: Connection): - SessionManager.initialize_schema(connection) - with Session(connection) as session: + def initialize_database(self) -> None: + SessionManager.initialize_schema(self.connection) + with Session(self.connection) as session: # Initialize the database with default data SessionManager.initialize_data(session) @staticmethod - def _load_core_model_classes(): + def load_model_classes(): # Load all the core model classes so that they are registered with the ORM. import palace.manager.sqlalchemy.model importlib.reload(palace.manager.sqlalchemy.model) - @classmethod - def create(cls) -> DatabaseFixture: - cls._load_core_model_classes() - engine, connection = cls._get_database_connection() - cls._initialize_database(connection) - return DatabaseFixture(engine, connection) - def close(self): # Destroy the database connection and engine. 
- self._connection.close() - self._engine.dispose() + self.connection.close() + self.engine.dispose() - @property - def connection(self) -> Connection: - return self._connection + @classmethod + @contextmanager + def fixture(cls, database_name: DatabaseNameFixture) -> Generator[Self, None, None]: + db_fixture = cls(database_name) + db_fixture.drop_existing_schema() + db_fixture.load_model_classes() + db_fixture.initialize_database() + try: + # Patch the SessionManager to make sure tests are not trying to access the engine directly. + with patch.object( + SessionManager, + "engine", + side_effect=BasePalaceException( + "Engine needs to be accessed from fixture within tests." + ), + ): + yield db_fixture + finally: + db_fixture.close() + + +@pytest.fixture(scope="session") +def database( + database_name: DatabaseNameFixture, +) -> Generator[DatabaseFixture, None, None]: + with DatabaseFixture.fixture(database_name) as db: + yield db + + +@pytest.fixture(scope="function") +def database_func( + database_name_func: DatabaseNameFixture, +) -> Generator[DatabaseFixture, None, None]: + with DatabaseFixture.fixture(database_name_func) as db: + yield db class DatabaseTransactionFixture: @@ -871,6 +994,15 @@ def credential(self, data_source_name=DataSource.GUTENBERG, type=None, patron=No return credential +@pytest.fixture(scope="function") +def db( + database: DatabaseFixture, +) -> Generator[DatabaseTransactionFixture, None, None]: + tr = DatabaseTransactionFixture.create(database) + yield tr + tr.close() + + class TemporaryDirectoryConfigurationFixture: """A fixture that configures the Configuration system to use a temporary directory. 
The directory is cleaned up when the fixture is closed.""" @@ -906,29 +1038,6 @@ def temporary_directory_configuration() -> ( fix.close() -@pytest.fixture(scope="session") -def application() -> Iterable[ApplicationFixture]: - app = ApplicationFixture.create() - yield app - app.close() - - -@pytest.fixture(scope="session") -def database(application: ApplicationFixture) -> Iterable[DatabaseFixture]: - db = DatabaseFixture.create() - yield db - db.close() - - -@pytest.fixture(scope="function") -def db( - database: DatabaseFixture, -) -> Generator[DatabaseTransactionFixture, None, None]: - tr = DatabaseTransactionFixture.create(database) - yield tr - tr.close() - - class IntegrationConfigurationFixture: def __init__(self, db: DatabaseTransactionFixture): self.db = db diff --git a/tests/fixtures/search.py b/tests/fixtures/search.py index cbb4256c18..4936951e8e 100644 --- a/tests/fixtures/search.py +++ b/tests/fixtures/search.py @@ -1,6 +1,7 @@ from __future__ import annotations from collections.abc import Generator +from contextlib import contextmanager import pytest from opensearchpy import OpenSearch @@ -14,14 +15,13 @@ from palace.manager.service.search.container import Search from palace.manager.sqlalchemy.model.work import Work from palace.manager.util.log import LoggerMixin -from tests.fixtures.database import DatabaseTransactionFixture +from tests.fixtures.database import DatabaseTransactionFixture, TestIdFixture from tests.fixtures.services import ServicesFixture from tests.mocks.search import SearchServiceFake class SearchTestConfiguration(ServiceConfiguration): url: AnyHttpUrl - index_prefix: str = "test_index" timeout: int = 20 maxsize: int = 25 @@ -38,41 +38,31 @@ class ExternalSearchFixture(LoggerMixin): to ensure that it works well overall, with a realistic index. 
""" - def __init__(self, db: DatabaseTransactionFixture, services: Services): + def __init__( + self, db: DatabaseTransactionFixture, services: Services, test_id: TestIdFixture + ): self.search_config = SearchTestConfiguration() self.services_container = services + self.index_prefix = test_id.id # Set up our testing search instance in the services container self.search_container = Search() - self.search_container.config.from_dict(self.search_config.dict()) + search_config_dict = self.search_config.dict() + search_config_dict["index_prefix"] = self.index_prefix + self.search_container.config.from_dict(search_config_dict) self.services_container.search.override(self.search_container) - self._indexes_created: list[str] = [] self.db = db self.client: OpenSearch = services.search.client() self.service: SearchServiceOpensearch1 = services.search.service() self.index: ExternalSearchIndex = services.search.index() - self._indexes_created = [] # Make sure the services container is wired up with the newly created search container wire_container(self.services_container) - def record_index(self, name: str): - self.log.info(f"Recording index {name} for deletion") - self._indexes_created.append(name) - def close(self): - for index in self._indexes_created: - try: - self.log.info(f"Deleting index {index}") - self.client.indices.delete(index) - except Exception as e: - self.log.info(f"Failed to delete index {index}: {e}") - - # Force test index deletion - self.client.indices.delete("test_index*") - self.log.info("Waiting for operations to complete.") - self.client.indices.refresh() + # Delete our index prefix + self.client.indices.delete(f"{self.index_prefix}*") # Unwire the services container self.services_container.unwire() @@ -90,19 +80,30 @@ def default_work(self, *args, **kwargs): work.set_presentation_ready() return work - def init_indices(self): - self.index.initialize_indices() + @classmethod + @contextmanager + def fixture( + cls, db: DatabaseTransactionFixture, services: 
Services, test_id: TestIdFixture + ): + fixture = cls(db, services, test_id) + try: + yield fixture + finally: + fixture.close() @pytest.fixture(scope="function") def external_search_fixture( - db: DatabaseTransactionFixture, services_fixture: ServicesFixture + db: DatabaseTransactionFixture, + services_fixture: ServicesFixture, + test_id_func: TestIdFixture, ) -> Generator[ExternalSearchFixture, None, None]: """Ask for an external search system.""" """Note: You probably want EndToEndSearchFixture instead.""" - fixture = ExternalSearchFixture(db, services_fixture.services) - yield fixture - fixture.close() + with ExternalSearchFixture.fixture( + db, services_fixture.services, test_id_func + ) as fixture: + yield fixture class EndToEndSearchFixture: @@ -237,9 +238,11 @@ def _compare_hits(self, expect, hits, query_args, should_be_ordered=True, **kwar count = self.external_search_index.count_works(filter) assert count == len(expect) - def close(self): - for index in self.external_search_index.search_service().indexes_created(): - self.external_search.record_index(index) + @classmethod + @contextmanager + def fixture(cls, search_fixture: ExternalSearchFixture): + fixture = cls(search_fixture) + yield fixture @pytest.fixture(scope="function") @@ -247,9 +250,8 @@ def end_to_end_search_fixture( external_search_fixture: ExternalSearchFixture, ) -> Generator[EndToEndSearchFixture, None, None]: """Ask for an external search system that can be populated with data for end-to-end tests.""" - fixture = EndToEndSearchFixture(external_search_fixture) - yield fixture - fixture.close() + with EndToEndSearchFixture.fixture(external_search_fixture) as fixture: + yield fixture class ExternalSearchFixtureFake: @@ -269,15 +271,20 @@ def close(self): self.services.unwire() self.services.search.reset_override() + @classmethod + @contextmanager + def fixture(cls, db: DatabaseTransactionFixture, services: Services): + fixture = cls(db, services) + try: + yield fixture + finally: + 
fixture.close() + @pytest.fixture(scope="function") def external_search_fake_fixture( db: DatabaseTransactionFixture, services_fixture: ServicesFixture ) -> Generator[ExternalSearchFixtureFake, None, None]: """Ask for an external search system that can be populated with data for end-to-end tests.""" - fixture = ExternalSearchFixtureFake( - db=db, - services=services_fixture.services, - ) - yield fixture - fixture.close() + with ExternalSearchFixtureFake.fixture(db, services_fixture.services) as fixture: + yield fixture diff --git a/tests/fixtures/webserver.py b/tests/fixtures/webserver.py index f1744a3b76..d06814d9b6 100644 --- a/tests/fixtures/webserver.py +++ b/tests/fixtures/webserver.py @@ -124,12 +124,12 @@ class MockAPIServer(LoggerMixin): def __init__(self, address: str, port: int): self._address = address - self._port = port self._server = MockAPIInternalServer( - (self._address, self._port), bind_and_activate=True + (self._address, port), bind_and_activate=True ) self._server.mock_api_server = self self._server_thread = threading.Thread(target=self._server.serve_forever) + self._port = self._server.socket.getsockname()[1] self._responses = {} self._requests = [] @@ -180,7 +180,7 @@ def requests(self) -> list[MockAPIServerRequest]: @pytest.fixture def mock_web_server() -> Generator[MockAPIServer, None, None]: """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) + _server = MockAPIServer("127.0.0.1", 0) _server.start() yield _server _server.stop() diff --git a/tests/manager/api/controller/test_profile.py b/tests/manager/api/controller/test_profile.py index ac04f3989a..467c84676f 100644 --- a/tests/manager/api/controller/test_profile.py +++ b/tests/manager/api/controller/test_profile.py @@ -17,7 +17,7 @@ class ProfileFixture(ControllerFixture): def __init__( self, db: DatabaseTransactionFixture, services_fixture: ServicesFixture ): - super().__init__(db, services_fixture, 
setup_cm=True) + super().__init__(db, services_fixture) # Nothing will happen to this patron. This way we can verify # that a patron can only see/modify their own profile. self.other_patron = db.patron() diff --git a/tests/manager/api/controller/test_scopedsession.py b/tests/manager/api/controller/test_scopedsession.py index 1cc62ea29f..6aefc8257e 100644 --- a/tests/manager/api/controller/test_scopedsession.py +++ b/tests/manager/api/controller/test_scopedsession.py @@ -1,68 +1,57 @@ +from collections.abc import Generator from contextlib import contextmanager +from unittest.mock import MagicMock, patch import flask +import pytest from sqlalchemy.orm import Session +from typing_extensions import Self -from palace.manager import api -from palace.manager.core.opds_import import OPDSAPI +from palace.manager.api.app import app, initialize_database from palace.manager.sqlalchemy.flask_sqlalchemy_session import current_session -from palace.manager.sqlalchemy.model.collection import Collection from palace.manager.sqlalchemy.model.datasource import DataSource from palace.manager.sqlalchemy.model.identifier import Identifier -from palace.manager.sqlalchemy.model.library import Library -from palace.manager.sqlalchemy.util import create -from tests.fixtures.api_controller import ( - ControllerFixture, - ControllerFixtureSetupOverrides, -) - - -class ScopedHolder: - """A scoped holder used to store some state in the test. This is necessary because - we want to do some unusual things with scoped sessions, and don't necessary have access - to a database transaction fixture in all of the various methods that will be called. 
- """ +from palace.manager.sqlalchemy.session import SessionManager +from tests.fixtures.database import DatabaseFixture +from tests.fixtures.services import ServicesFixture +from tests.mocks.circulation import MockCirculationManager + + +class ScopedSessionFixture: + def __init__( + self, db_fixture: DatabaseFixture, services: ServicesFixture, session: Session + ): + self.db_fixture = db_fixture + self.session = session + self.services = services + self.app = app + with (patch.object(SessionManager, "engine", return_value=db_fixture.engine),): + initialize_database() + self.app.manager = MockCirculationManager(app._db, services.services) + self.mock_library = MagicMock() + self.mock_library.has_root_lanes = False + + def cleanup(self) -> None: + delattr(self.app, "manager") + delattr(self.app, "_db") + + @classmethod + @contextmanager + def fixture( + cls, db_fixture: DatabaseFixture, services_fixture: ServicesFixture + ) -> Generator[Self, None, None]: + with Session(db_fixture.connection) as session: + fixture = cls(db_fixture, services_fixture, session) + yield fixture + fixture.cleanup() - def __init__(self): - self.identifiers = 0 - - def fresh_id(self) -> str: - self.identifiers = self.identifiers + 1 - return str(self.identifiers) - - def make_default_libraries(self, session: Session): - libraries = [] - for i in range(2): - name = self.fresh_id() + " (library for scoped session)" - library, ignore = create( - session, - Library, - short_name=name, - public_key="x", - private_key=b"y", - settings_dict={ - "website": "https://library.com", - "help_web": "https://library.com/help", - }, - ) - libraries.append(library) - return libraries - - def make_default_collection(self, session: Session, library): - """We need to create a test collection that - uses the scoped session. 
- """ - collection, _ = Collection.by_name_and_protocol( - session, - self.fresh_id() + " (collection for scoped session)", - OPDSAPI.label(), - ) - settings = OPDSAPI.settings_class()( - external_account_id="http://url.com", data_source="OPDS" - ) - OPDSAPI.settings_update(collection.integration_configuration, settings) - library.collections.append(collection) - return collection + +@pytest.fixture +def scoped_session_fixture( + database_func: DatabaseFixture, services_fixture: ServicesFixture +) -> Generator[ScopedSessionFixture, None, None]: + with ScopedSessionFixture.fixture(database_func, services_fixture) as fixture: + yield fixture class TestScopedSession: @@ -74,45 +63,17 @@ class TestScopedSession: the corresponding behavior in unit tests. """ - @contextmanager - def request_context_and_transaction( + def test_scoped_session( self, - scoped: ScopedHolder, - controller_fixture_without_cm: ControllerFixture, - *args + scoped_session_fixture: ScopedSessionFixture, ): - """Run a simulated Flask request in a transaction that gets rolled - back at the end of the request. - """ - - fixture = controller_fixture_without_cm - with fixture.app.test_request_context(*args) as ctx: - transaction = current_session.begin_nested() # type: ignore[attr-defined] - fixture.app.manager = fixture.circulation_manager_setup_with_session( - session=current_session, # type: ignore[arg-type] - overrides=ControllerFixtureSetupOverrides( - make_default_libraries=scoped.make_default_libraries, - make_default_collection=scoped.make_default_collection, - ), - ) - yield ctx - transaction.rollback() - - def test_scoped_session(self, controller_fixture_without_cm: ControllerFixture): - fixture = controller_fixture_without_cm - api.app.initialize_database() - - # Create a holder that carries some state for the purposes of testing - scoped = ScopedHolder() - # Start a simulated request to the Flask app server. 
- with self.request_context_and_transaction(scoped, fixture, "/"): + with scoped_session_fixture.app.test_request_context("/"): # Each request is given its own database session distinct - # from the one used by most unit tests or the one - # associated with the CirculationManager object. + # from the one used by most unit tests and the one created + # outside of this context. session1 = current_session() - assert session1 != fixture.db - assert session1 != fixture.app.manager._db + assert session1 != scoped_session_fixture.session # Add an Identifier to the database. identifier = Identifier(type=DataSource.GUTENBERG, identifier="1024") @@ -124,23 +85,22 @@ def test_scoped_session(self, controller_fixture_without_cm: ControllerFixture): [identifier] = session1.query(Identifier).all() assert "1024" == identifier.identifier - # It doesn't show up in fixture.db.session, the database session - # used by most other unit tests, because it was created - # within the (still-active) context of a Flask request, - # which happens within a nested database transaction. - assert [] == fixture.db.session.query(Identifier).all() + # It doesn't show up in a different session because + # the request is still in progress so its transaction + # hasn't been committed. + assert [] == scoped_session_fixture.session.query(Identifier).all() # It shows up in the flask_scoped_session object that # created the request-scoped session, because within the # context of a request, running database queries on that object # actually runs them against your request-scoped session. - [identifier] = fixture.app.manager._db.query(Identifier).all() + [identifier] = app.manager._db.query(Identifier).all() assert "1024" == identifier.identifier # We use the session context manager here to make sure # we don't keep a transaction open for this new session # once we are done with it. 
- with fixture.app.manager._db.session_factory() as new_session: + with app.manager._db.session_factory() as new_session: # But if we were to use flask_scoped_session to create a # brand new session, it would not see the Identifier, # because it's running in a different database session. @@ -149,44 +109,33 @@ def test_scoped_session(self, controller_fixture_without_cm: ControllerFixture): # When the index controller runs in the request context, # it doesn't store anything that's associated with the # scoped session. - flask.request.library = fixture.library # type: ignore - response = fixture.app.manager.index_controller() + flask.request.library = scoped_session_fixture.mock_library + response = app.manager.index_controller() assert 302 == response.status_code # Once we exit the context of the Flask request, the - # transaction is rolled back. The Identifier never actually - # enters the database. - # - # If it did enter the database, it would never leave. Changes - # that happen through self._db happen inside a nested - # transaction which is rolled back after the test is over. - # But changes that happen through a session-scoped database - # connection are actually written to the database when we - # leave the scope of the request. - # - # To avoid this, we use test_request_context_and_transaction - # to create a nested transaction that's rolled back just - # before we leave the scope of the request. - assert [] == fixture.db.session.query(Identifier).all() + # transaction is committed and the Identifier is written to the + # database. That is why we run this test with the database_func + # fixture, which gives us a function scoped database to work with. + # This database is removed after the test completes, so we don't + # have to worry about cleaning up the database after ourselves. 
+ [identifier] = scoped_session_fixture.session.query(Identifier).all() + assert "1024" == identifier.identifier # Now create a different simulated Flask request - with self.request_context_and_transaction(scoped, fixture, "/"): + with app.test_request_context("/"): session2 = current_session() - assert session2 != fixture.db - assert session2 != fixture.app.manager._db + assert session2 != scoped_session_fixture.session + assert session2 != app.manager._db # The controller still works in the new request context - # nothing it needs is associated with the previous scoped # session. - flask.request.library = fixture.library # type: ignore - response = fixture.app.manager.index_controller() + flask.request.library = scoped_session_fixture.mock_library + response = app.manager.index_controller() assert 302 == response.status_code # The two Flask requests got different sessions, neither of # which is the same as self._db, the unscoped database session # used by most other unit tests. assert session1 != session2 - - # Make sure that we close the connections for the scoped sessions. 
- session1.bind.dispose() - session2.bind.dispose() diff --git a/tests/manager/api/test_app.py b/tests/manager/api/test_app.py index b18f993147..d7d79874f1 100644 --- a/tests/manager/api/test_app.py +++ b/tests/manager/api/test_app.py @@ -10,9 +10,14 @@ def test_initialize_application_http( db: DatabaseTransactionFixture, services_fixture_wired: ServicesFixture ): # Use the db transaction fixture so that we don't use the production settings by mistake - with patch.object( - HTTP, "set_quick_failure_settings" - ) as mock_set_quick_failure_settings: + with ( + patch.object( + HTTP, "set_quick_failure_settings" + ) as mock_set_quick_failure_settings, + patch("palace.manager.api.app.initialize_database"), + patch("palace.manager.api.app.initialize_circulation_manager"), + patch("palace.manager.api.app.app"), + ): # Initialize the app, which will set the HTTP configuration initialize_application() diff --git a/tests/manager/api/test_overdrive.py b/tests/manager/api/test_overdrive.py index 032eab4e9b..317cb0a662 100644 --- a/tests/manager/api/test_overdrive.py +++ b/tests/manager/api/test_overdrive.py @@ -84,19 +84,6 @@ from tests.fixtures.time import Time -@pytest.fixture -def mock_web_server(): - """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) - _server.start() - logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") - yield _server - logging.info( - f"shutting down mock web server on {_server.address()}:{_server.port()}" - ) - _server.stop() - - class OverdriveFilesFixture(FilesFixture): """A fixture providing access to Overdrive files.""" diff --git a/tests/manager/api/test_scripts.py b/tests/manager/api/test_scripts.py index e3b8041cfc..743018dfa5 100644 --- a/tests/manager/api/test_scripts.py +++ b/tests/manager/api/test_scripts.py @@ -738,7 +738,7 @@ def export(self, _db, start, end): output = StringIO() cmd_args = ["--start=20190820", "--end=20190827"] 
exporter = MockLocalAnalyticsExporter() - script = LocalAnalyticsExportScript() + script = LocalAnalyticsExportScript(_db=db.session) script.do_run(output=output, cmd_args=cmd_args, exporter=exporter) assert "test" == output.getvalue() assert ["20190820", "20190827"] == exporter.called_with diff --git a/tests/manager/search/test_external_search.py b/tests/manager/search/test_external_search.py index d3a41e2144..8f6cd1a45d 100644 --- a/tests/manager/search/test_external_search.py +++ b/tests/manager/search/test_external_search.py @@ -1979,12 +1979,6 @@ def works(worklist, facets): debug=True, ) - def assert_featured(description, worklist, facets, expect): - # Generate a list of featured works for the given `worklist` - # and compare that list against `expect`. - actual = works(worklist, facets) - fixture.assert_works(description, expect, actual) - worklist = WorkList() worklist.initialize(transaction.default_library()) facets = FeaturedFacets(1, random_seed=Filter.DETERMINISTIC) @@ -2006,12 +2000,11 @@ def assert_featured(description, worklist, facets, expect): ) # The featured work appears above the non-featured work, # even though it's lower quality and is not available. - assert_featured( - "Works from WorkList based on CustomList", - best_sellers, - facets, - [data.featured_on_list, data.not_featured_on_list], - ) + expect = [data.featured_on_list, data.not_featured_on_list] + # Generate a list of featured works for the given `worklist` + # and compare that list against `expect`. + actual = works(best_sellers, facets) + fixture.assert_works("Works from WorkList based on CustomList", expect, actual) # By changing the minimum_featured_quality you can control # at what point a work is considered 'featured' -- at which @@ -2040,34 +2033,45 @@ def assert_featured(description, worklist, facets, expect): assert data.featured_on_list in last_two # Up to this point we've been avoiding the random element, - # but we can introduce that now by passing in a numeric seed. 
- # In normal usage, the current time is used as the seed. + # but we can introduce that now by letting the random_seed + # parameter be None. # # The random element is relatively small, so it mainly acts # to rearrange works whose scores were similar before. # - # The order of the works when using random depends on 4 things: - # - The seed - # - The id (work_id) - # - The index name - # - The shard id - # If any of those change the order of works in this result may change, - # and hence the order of works in this assert must also change - # E.g. If the index version changes from v5 to v6, this may affect the order of works queried - # Keeping everything else the same, the order of works will remain reproducible across test runs - random_facets = FeaturedFacets(1, random_seed=43) - assert_featured( - "Works permuted by a random seed", - worklist, - random_facets, - [ - data.hq_available_2, - data.hq_available, - data.default_quality, - data.hq_not_available, - data.not_featured_on_list, - data.featured_on_list, - ], + # In this test we have two groups of works -- one group is + # high-quality and available, the other group is low-quality. + # With the random element in play, the high-quality works will + # be randomly permuted among themselves, and the low-quality + # works will be randomly permuted among themselves. + # However, the high-quality works will always show up before + # the low-quality works. + random_facets = FeaturedFacets(1) + expect_high_quality = [ + data.hq_available_2, + data.hq_available, + ] + expect_low_quality = [ + data.default_quality, + data.hq_not_available, + data.not_featured_on_list, + data.featured_on_list, + ] + # Generate a list of featured works for the given `worklist` + # and compare that list against `expect`. 
+ actual_random = works(worklist, random_facets) + assert len(actual_random) == 6 + fixture.assert_works( + "Works permuted by a random seed (high quality)", + expect_high_quality, + actual_random[:2], + should_be_ordered=False, + ) + fixture.assert_works( + "Works permuted by a random seed (low quality)", + expect_low_quality, + actual_random[2:], + should_be_ordered=False, ) diff --git a/tests/manager/search/test_migration_states.py b/tests/manager/search/test_migration_states.py index 12a5cbf22d..1f2e43232d 100644 --- a/tests/manager/search/test_migration_states.py +++ b/tests/manager/search/test_migration_states.py @@ -29,32 +29,24 @@ def test_initial_migration_case( self, external_search_fixture: ExternalSearchFixture ): fx = external_search_fixture - db = fx.db index = fx.index service = fx.service - # Ensure we are in the initial state, no test indices and pointer available - prefix = fx.search_config.index_prefix - all_indices = fx.client.indices.get("*") - for index_name in all_indices.keys(): - if prefix in index_name: - fx.client.indices.delete(index_name) - - # We cannot make any requests before we intitialize + # We cannot make any requests before we initialize with pytest.raises(Exception) as raised: index.query_works("") assert "index_not_found" in str(raised.value) - # When a new sytem comes up the first code to run is the InstanceInitailization script + # When a new system comes up the first code to run is the InstanceInitialization script # This preps the DB and the search indices/pointers InstanceInitializationScript().initialize_search_indexes() # Ensure we have created the index and pointers - new_index_name = index._revision.name_for_index(prefix) - empty_index_name = service._empty(prefix) + new_index_name = index._revision.name_for_index(fx.index_prefix) + empty_index_name = service._empty(fx.index_prefix) all_indices = fx.client.indices.get("*") - assert prefix in new_index_name + assert fx.index_prefix in new_index_name assert 
new_index_name in all_indices.keys() assert empty_index_name in all_indices.keys() assert fx.client.indices.exists_alias( diff --git a/tests/manager/search/test_service.py b/tests/manager/search/test_service.py index d0195dacd1..46117fa145 100644 --- a/tests/manager/search/test_service.py +++ b/tests/manager/search/test_service.py @@ -2,7 +2,6 @@ from palace.manager.search.document import LONG, SearchMappingDocument from palace.manager.search.revision import SearchSchemaRevision -from palace.manager.search.service import SearchServiceOpensearch1 from tests.fixtures.search import ExternalSearchFixture @@ -18,9 +17,6 @@ def mapping_document(self) -> SearchMappingDocument: return self.document -BASE_NAME = "base" - - class TestService: """ Tests to verify that the Opensearch service implementation has the semantics we expect. @@ -30,89 +26,75 @@ def test_create_empty_idempotent( self, external_search_fixture: ExternalSearchFixture ): """Creating the empty index is idempotent.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service service.create_empty_index() - - # Log the index so that the fixture cleans it up afterward. - external_search_fixture.record_index("base-empty") - service.create_empty_index() indices = external_search_fixture.client.indices.client.indices assert indices is not None - assert indices.exists("base-empty") + assert indices.exists(f"{external_search_fixture.index_prefix}-empty") def test_create_index_idempotent( self, external_search_fixture: ExternalSearchFixture ): """Creating any index is idempotent.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service revision = BasicMutableRevision(23) service.index_create(revision) service.index_create(revision) - # Log the index so that the fixture cleans it up afterward. 
- external_search_fixture.record_index("base-v23") - indices = external_search_fixture.client.indices.client.indices assert indices is not None - assert indices.exists(revision.name_for_index("base")) + assert indices.exists( + revision.name_for_index(external_search_fixture.index_prefix) + ) def test_read_pointer_none(self, external_search_fixture: ExternalSearchFixture): """The read pointer is initially unset.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) - assert None == service.read_pointer() + service = external_search_fixture.service + assert service.read_pointer() is None def test_write_pointer_none(self, external_search_fixture: ExternalSearchFixture): """The write pointer is initially unset.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) - assert None == service.write_pointer() + service = external_search_fixture.service + assert service.write_pointer() is None def test_read_pointer_set(self, external_search_fixture: ExternalSearchFixture): """Setting the read pointer works.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service revision = BasicMutableRevision(23) service.index_create(revision) - # Log the index so that the fixture cleans it up afterward. - external_search_fixture.record_index("base-v23") - service.read_pointer_set(revision) - assert "base-v23" == service.read_pointer() + assert service.read_pointer() == f"{external_search_fixture.index_prefix}-v23" def test_read_pointer_set_empty( self, external_search_fixture: ExternalSearchFixture ): """Setting the read pointer to the empty index works.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service service.create_empty_index() - # Log the index so that the fixture cleans it up afterward. 
- external_search_fixture.record_index("base-empty") - service.read_pointer_set_empty() - assert "base-empty" == service.read_pointer() + assert service.read_pointer() == f"{external_search_fixture.index_prefix}-empty" def test_write_pointer_set(self, external_search_fixture: ExternalSearchFixture): """Setting the write pointer works.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service revision = BasicMutableRevision(23) service.index_create(revision) - # Log the index so that the fixture cleans it up afterward. - external_search_fixture.record_index("base-v23") - service.write_pointer_set(revision) pointer = service.write_pointer() assert pointer is not None - assert "base-v23" == pointer.target_name + assert pointer.target_name == f"{external_search_fixture.index_prefix}-v23" def test_populate_index_idempotent( self, external_search_fixture: ExternalSearchFixture ): """Populating an index is idempotent.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service revision = BasicMutableRevision(23) mappings = revision.mapping_document() @@ -145,14 +127,20 @@ def test_populate_index_idempotent( service.index_create(revision) - # Log the index so that the fixture cleans it up afterward. 
- external_search_fixture.record_index("base-v23") - service.index_submit_documents("base-v23", documents) - service.index_submit_documents("base-v23", documents) + service.index_submit_documents( + f"{external_search_fixture.index_prefix}-v23", documents + ) + service.index_submit_documents( + f"{external_search_fixture.index_prefix}-v23", documents + ) indices = external_search_fixture.client.indices.client.indices assert indices is not None - assert indices.exists(revision.name_for_index("base")) - assert indices.get(revision.name_for_index("base"))["base-v23"]["mappings"] == { + assert indices.exists( + revision.name_for_index(external_search_fixture.index_prefix) + ) + assert indices.get( + revision.name_for_index(external_search_fixture.index_prefix) + )[f"{external_search_fixture.index_prefix}-v23"]["mappings"] == { "properties": mappings.serialize_properties() } diff --git a/tests/migration/conftest.py b/tests/migration/conftest.py index ec79f15463..c08c518f33 100644 --- a/tests/migration/conftest.py +++ b/tests/migration/conftest.py @@ -12,8 +12,7 @@ from pytest_alembic.config import Config from palace.manager.sqlalchemy.session import json_serializer -from tests.fixtures.database import ApplicationFixture, DatabaseFixture -from tests.fixtures.services import ServicesFixture +from tests.fixtures.database import DatabaseFixture if TYPE_CHECKING: import alembic.config @@ -23,48 +22,30 @@ pytest_plugins = [ "tests.fixtures.services", + "tests.fixtures.database", ] -@pytest.fixture(scope="function") -def application( - services_fixture: ServicesFixture, -) -> Generator[ApplicationFixture, None, None]: - app = ApplicationFixture.create() - yield app - app.close() - - -@pytest.fixture(scope="function") -def database(application: ApplicationFixture) -> Generator[DatabaseFixture, None, None]: - # This is very similar to the normal database fixture and uses the same object, - # but because these tests are done outside a transaction, we need this fixture - # to have 
function scope, so the database schema is completely reset between - # tests. - db = DatabaseFixture.create() - yield db - db.close() +@pytest.fixture +def alembic_config_path() -> Path: + return Path(__file__).parent.parent.parent.absolute() / "alembic.ini" @pytest.fixture -def alembic_config() -> Config: +def alembic_config(alembic_config_path: Path) -> Config: """ Use an explicit path to the alembic config file. This lets us run pytest from a different directory than the root of the project. """ - return Config( - config_options={ - "file": str(Path(__file__).parent.parent.parent.absolute() / "alembic.ini") - } - ) + return Config(config_options={"file": str(alembic_config_path)}) @pytest.fixture -def alembic_engine(database: DatabaseFixture) -> Engine: +def alembic_engine(database_func: DatabaseFixture) -> Engine: """ Override this fixture to provide pytest-alembic powered tests with a database handle. """ - return database._engine + return database_func.engine @pytest.fixture diff --git a/tests/migration/test_instance_init_script.py b/tests/migration/test_instance_init_script.py index 217df1e519..234dca3cce 100644 --- a/tests/migration/test_instance_init_script.py +++ b/tests/migration/test_instance_init_script.py @@ -1,28 +1,75 @@ import logging import sys +from collections.abc import Generator +from contextlib import contextmanager from io import StringIO from multiprocessing import Process -from unittest.mock import MagicMock, Mock +from pathlib import Path +from unittest.mock import MagicMock, Mock, patch +import pytest from pytest_alembic import MigrationContext from sqlalchemy import inspect -from sqlalchemy.engine import Engine +from typing_extensions import Self +from palace.manager.core.config import Configuration from palace.manager.scripts import InstanceInitializationScript from palace.manager.sqlalchemy.session import SessionManager -from tests.fixtures.database import ApplicationFixture -from tests.fixtures.services import mock_services_container 
- - -def _run_script() -> None: +from tests.fixtures.database import DatabaseFixture, DatabaseNameFixture +from tests.fixtures.services import ServicesFixture, mock_services_container + + +class InstanceInitScriptFixture: + def __init__( + self, + database_func: DatabaseFixture, + services_fixture: ServicesFixture, + alembic_config_path: Path, + ): + self.database = database_func + self.services = services_fixture + self.alembic_config_path = alembic_config_path + + def script(self) -> InstanceInitializationScript: + return InstanceInitializationScript(config_file=self.alembic_config_path) + + @classmethod + @contextmanager + def fixture( + cls, + database_func: DatabaseFixture, + services_fixture: ServicesFixture, + alembic_config_path: Path, + ) -> Generator[Self, None, None]: + fixture = cls(database_func, services_fixture, alembic_config_path) + with patch.object(SessionManager, "engine", fixture.database.engine_func): + yield fixture + + +@pytest.fixture +def instance_init_script_fixture( + database_func: DatabaseFixture, + services_fixture: ServicesFixture, + alembic_config_path: Path, +) -> Generator[InstanceInitScriptFixture, None, None]: + with InstanceInitScriptFixture.fixture( + database_func, services_fixture, alembic_config_path + ) as fixture: + yield fixture + + +def _run_script(config_path: Path, worker_url: str) -> None: try: # Capturing the log output stream = StringIO() logging.basicConfig(stream=stream, level=logging.INFO, force=True) mock_services = MagicMock() - with mock_services_container(mock_services): - script = InstanceInitializationScript() + with ( + mock_services_container(mock_services), + patch.object(Configuration, "database_url", return_value=worker_url), + ): + script = InstanceInitializationScript(config_file=config_path) script.run() # Set our exit code to the number of upgrades we ran @@ -34,7 +81,11 @@ def _run_script() -> None: sys.exit(-1) -def test_locking(alembic_runner: MigrationContext, alembic_engine: Engine) -> None: 
+def test_locking( + alembic_runner: MigrationContext, + alembic_config_path: Path, + database_name_func: DatabaseNameFixture, +) -> None: # Migrate to the initial revision alembic_runner.migrate_down_to("base") @@ -42,9 +93,13 @@ def test_locking(alembic_runner: MigrationContext, alembic_engine: Engine) -> No # at the same time. One of them should do the migration, and # the other two should wait, then do no migration since it # has already been done. - p1 = Process(target=_run_script) - p2 = Process(target=_run_script) - p3 = Process(target=_run_script) + process_kwargs = { + "config_path": alembic_config_path, + "worker_url": database_name_func.worker_url, + } + p1 = Process(target=_run_script, kwargs=process_kwargs) + p2 = Process(target=_run_script, kwargs=process_kwargs) + p3 = Process(target=_run_script, kwargs=process_kwargs) p1.start() p2.start() @@ -67,17 +122,17 @@ def test_locking(alembic_runner: MigrationContext, alembic_engine: Engine) -> No assert exit_codes[2] == 0 -def test_initialize(application: ApplicationFixture) -> None: - # Run the script and make sure we create the alembic_version table +def test_initialize(instance_init_script_fixture: InstanceInitScriptFixture) -> None: + # Drop any existing schema + instance_init_script_fixture.database.drop_existing_schema() - application.drop_existing_schema() - - engine = SessionManager.engine() + # Run the script and make sure we create the alembic_version table + engine = instance_init_script_fixture.database.engine inspector = inspect(engine) assert "alembic_version" not in inspector.get_table_names() assert len(inspector.get_table_names()) == 0 - script = InstanceInitializationScript() + script = instance_init_script_fixture.script() script.initialize_database = Mock(wraps=script.initialize_database) script.migrate_database = Mock(wraps=script.migrate_database) script.run() @@ -97,7 +152,10 @@ def test_initialize(application: ApplicationFixture) -> None: assert script.migrate_database.call_count == 
1 -def test_migrate(alembic_runner: MigrationContext) -> None: +def test_migrate( + alembic_runner: MigrationContext, + instance_init_script_fixture: InstanceInitScriptFixture, +) -> None: # Run the script and make sure we create the alembic_version table # Migrate to the initial revision alembic_runner.migrate_down_to("base") @@ -106,7 +164,7 @@ def test_migrate(alembic_runner: MigrationContext) -> None: assert alembic_runner.current == "base" assert alembic_runner.current != alembic_runner.heads[0] - script = InstanceInitializationScript() + script = instance_init_script_fixture.script() script.initialize_database = Mock(wraps=script.initialize_database) script.migrate_database = Mock(wraps=script.migrate_database) script.run() diff --git a/tox.ini b/tox.ini index 70161a18ba..599f14598e 100644 --- a/tox.ini +++ b/tox.ini @@ -9,12 +9,11 @@ commands_pre = commands = pytest {posargs:tests/manager} passenv = - SIMPLIFIED_* PALACE_* CI setenv = COVERAGE_FILE = .coverage.{envname} - docker: SIMPLIFIED_TEST_DATABASE=postgresql://simplified_test:test@localhost:9005/simplified_circulation_test + docker: PALACE_TEST_DATABASE_URL=postgresql://palace:test@localhost:9005 docker: PALACE_TEST_SEARCH_URL=http://localhost:9007 docker: PALACE_TEST_MINIO_ENDPOINT_URL=http://localhost:9004 docker: PALACE_TEST_MINIO_USER=palace @@ -46,9 +45,8 @@ allowlist_externals = [docker:db-circ] image = postgres:12 environment = - POSTGRES_USER=simplified_test + POSTGRES_USER=palace POSTGRES_PASSWORD=test - POSTGRES_DB=simplified_circulation_test ports = 9005:5432/tcp healthcheck_cmd = pg_isready