diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c4bf85efb1..1e49729ac3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -195,7 +195,7 @@ jobs: bash -c " source env/bin/activate && poetry install --without ci --no-root --sync && - pytest --no-cov tests/manager + pytest --no-cov tests " env: BUILD_CACHE_FROM: type=gha,scope=buildkit-${{ github.run_id }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index abd1ea7cc2..58e3889b2b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -52,49 +52,7 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} files: ./coverage.xml - flags: manager - name: manager-${{ matrix.python-version }} - verbose: true - - test-migrations: - name: Migration Tests - runs-on: ubuntu-latest - permissions: - contents: read - - steps: - - uses: actions/checkout@v4 - - # See comment here: https://github.com/actions/runner-images/issues/1187#issuecomment-686735760 - - name: Disable network offload - run: sudo ethtool -K eth0 tx off rx off - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Install Poetry - uses: ./.github/actions/poetry - with: - cache: true - - - name: Install Tox - run: | - poetry install --only ci --no-root - env: - POETRY_VIRTUALENVS_CREATE: false - - - name: Run Migration Tests - run: tox -e "migration-docker" - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: ./coverage.xml - flags: migration - name: "migration-3.10" + name: test-${{ matrix.python-version }} verbose: true docker-test-migrations: diff --git a/README.md b/README.md index ea96a650c5..5c0c1402d7 100644 --- a/README.md +++ b/README.md @@ -636,20 +636,27 @@ tox -e "py310-docker" If you already have elastic search or postgres running locally, you can run them instead by setting the following environment variables: -- `SIMPLIFIED_TEST_DATABASE` 
+- `PALACE_TEST_DATABASE_URL` - `PALACE_TEST_SEARCH_URL` Make sure the ports and usernames are updated to reflect the local configuration. ```sh # Set environment variables -export SIMPLIFIED_TEST_DATABASE="postgresql://simplified_test:test@localhost:9005/simplified_circulation_test" -export SIMPLIFIED_TEST_OPENSEARCH="http://localhost:9200" +export PALACE_TEST_DATABASE_URL="postgresql://simplified_test:test@localhost:9005/simplified_circulation_test" +export PALACE_TEST_SEARCH_URL="http://localhost:9200" # Run tox tox -e "py310" ``` +The tests assume that they have permission to create and drop databases. They connect to the +provided database URL and create a new database for each test run. If the user does not have permission +to create and drop databases, the tests will fail. You can disable this behavior by setting the +`PALACE_TEST_DATABASE_CREATE_DATABASE` environment variable to `false`. + +```sh +export PALACE_TEST_DATABASE_CREATE_DATABASE=false +``` + ### Override `pytest` arguments If you wish to pass additional arguments to `pytest` you can do so through `tox`. Every argument passed after a `--` to @@ -752,16 +759,10 @@ This profiler uses [PyInstrument](https://pyinstrument.readthedocs.io/en/latest/ PyInstrument can also be used to profile the test suite. This can be useful to identify slow tests, or to identify performance regressions. 
-To profile the core test suite, run the following command: - -```sh -pyinstrument -m pytest --no-cov tests/core/ -``` - -To profile the API test suite, run the following command: +To profile the test suite, run the following command: ```sh -pyinstrument -m pytest --no-cov tests/api/ +pyinstrument -m pytest --no-cov -n 0 tests ``` #### Environment Variables diff --git a/docker-compose.yml b/docker-compose.yml index fecf154f06..143fc3d351 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -22,7 +22,7 @@ x-cm-variables: &cm PALACE_CELERY_CLOUDWATCH_STATISTICS_DRYRUN: "true" # Set up the environment variables used for testing as well - SIMPLIFIED_TEST_DATABASE: "postgresql://palace:test@pg:5432/circ" + PALACE_TEST_DATABASE_URL: "postgresql://palace:test@pg:5432/circ" PALACE_TEST_SEARCH_URL: "http://os:9200" PALACE_TEST_MINIO_ENDPOINT_URL: "http://minio:9000" PALACE_TEST_MINIO_USER: "palace" diff --git a/docker/README.md b/docker/README.md index e009866e73..46767bcd7b 100644 --- a/docker/README.md +++ b/docker/README.md @@ -86,7 +86,7 @@ Environment variables can be set with the `-e VARIABLE_KEY='variable_value'` opt *Required.* The URL of the production PostgreSQL database for the application. -### `SIMPLIFIED_TEST_DATABASE` +### `PALACE_TEST_DATABASE_URL` *Optional.* The URL of a PostgreSQL database for tests. This optional variable allows unit tests to be run in the container. 
diff --git a/poetry.lock b/poetry.lock index 15e972198e..729a56dbff 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1235,6 +1235,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "execnet" +version = "2.1.1" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + [[package]] name = "expiringdict" version = "1.2.2" @@ -2266,6 +2280,7 @@ files = [ {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, @@ -3590,6 +3605,26 @@ files = [ [package.dependencies] pytest = ">=7.0.0" +[[package]] +name = "pytest-xdist" +version = "3.6.1" +description = "pytest xdist plugin for distributed testing, most importantly 
across multiple CPUs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, +] + +[package.dependencies] +execnet = ">=2.1" +pytest = ">=7.0.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -4907,4 +4942,4 @@ lxml = ">=3.8" [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "ef03f72fe652d1b7e53071457a4023a6e97147cff88aeca4919e251e0e4a5fb2" +content-hash = "f170388105405aa1f69a194f5c5e77b095d657e423eab88f3b22ab0a530f8fa8" diff --git a/pyproject.toml b/pyproject.toml index 30c94af89e..74fbca53c0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,11 +9,14 @@ exclude_also = [ '^\s*pass\s*$', '^\s*raise NotImplementedError\s*$', ] +include_namespace_packages = true [tool.coverage.run] branch = true +concurrency = ["multiprocessing", "thread"] +parallel = true relative_files = true -source = ["palace.manager"] +source = ["src"] [tool.isort] known_first_party = ["palace"] @@ -286,6 +289,7 @@ pytest-alembic = "^0.11.0" pytest-celery = "^0.0.0" pytest-cov = "^5.0.0" pytest-timeout = "*" +pytest-xdist = "^3.5.0" requests-mock = "1.12.1" types-aws-xray-sdk = "^2.11.0.13" types-Flask-Cors = "^4.0.0" @@ -306,6 +310,8 @@ psycopg2 = "~2.9.5" addopts = [ "--cov", "--cov-report=xml", + "--dist=worksteal", + "--numprocesses=auto", "--strict-markers", ] markers = [ diff --git a/src/palace/manager/core/config.py b/src/palace/manager/core/config.py index 4efa28f483..6430566fe9 100644 --- a/src/palace/manager/core/config.py +++ b/src/palace/manager/core/config.py @@ -41,7 +41,6 @@ class Configuration(ConfigurationConstants): log = logging.getLogger("Configuration file loader") # 
Environment variables that contain URLs to the database - DATABASE_TEST_ENVIRONMENT_VARIABLE = "SIMPLIFIED_TEST_DATABASE" DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE = "SIMPLIFIED_PRODUCTION_DATABASE" # Environment variable for Overdrive fulfillment keys @@ -72,46 +71,25 @@ class Configuration(ConfigurationConstants): @classmethod def database_url(cls): - """Find the database URL configured for this site. - - For compatibility with old configurations, we will look in the - site configuration first. - - If it's not there, we will look in the appropriate environment - variable. - """ - - # To avoid expensive mistakes, test and production databases - # are always configured with separate keys. The TESTING variable - # controls which database is used, and it's set by the - # package_setup() function called in every component's - # tests/__init__.py. - test = os.environ.get("TESTING", False) - if test: - environment_variable = cls.DATABASE_TEST_ENVIRONMENT_VARIABLE - else: - environment_variable = cls.DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE - - url = os.environ.get(environment_variable) + """Find the database URL configured for this site.""" + url = os.environ.get(cls.DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE) if not url: raise CannotLoadConfiguration( "Database URL was not defined in environment variable (%s)." - % environment_variable + % cls.DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE ) - url_obj = None try: url_obj = make_url(url) except ArgumentError as e: - # Improve the error message by giving a guide as to what's - # likely to work. + # Improve the error message by giving a guide as to what's likely to work. raise ArgumentError( - "Bad format for database URL (%s). Expected something like postgresql://[username]:[password]@[hostname]:[port]/[database name]" - % url + "Bad format for database URL. Expected something like postgresql://[username]:[password]@[hostname]:[port]/[database name]" ) - # Calling __to_string__ will hide the password. 
- logging.info("Connecting to database: %s" % url_obj.__to_string__()) + logging.info( + "Connecting to database: %s" % url_obj.render_as_string(hide_password=True) + ) return url @classmethod diff --git a/src/palace/manager/scripts.py b/src/palace/manager/scripts.py index a171b0adf6..46fbd8ae0b 100644 --- a/src/palace/manager/scripts.py +++ b/src/palace/manager/scripts.py @@ -4,7 +4,7 @@ import os import sys import time -from collections.abc import Sequence +from collections.abc import Callable, Sequence from datetime import timedelta from pathlib import Path from typing import Any @@ -12,7 +12,7 @@ from alembic import command, config from alembic.util import CommandError from sqlalchemy import inspect, select -from sqlalchemy.engine import Connection +from sqlalchemy.engine import Connection, Engine from sqlalchemy.exc import NoResultFound from sqlalchemy.orm import Session @@ -486,7 +486,11 @@ class InstanceInitializationScript: remain idempotent. """ - def __init__(self, config_file: Path | None = None) -> None: + def __init__( + self, + config_file: Path | None = None, + engine_factory: Callable[[], Engine] = SessionManager.engine, + ) -> None: self._log: logging.Logger | None = None self._container = container_instance() @@ -494,6 +498,8 @@ def __init__(self, config_file: Path | None = None) -> None: self._container.init_resources() self._config_file = config_file + self._engine_factory = engine_factory + @property def log(self) -> logging.Logger: if self._log is None: @@ -568,7 +574,7 @@ def run(self) -> None: instance of the script is running at a time. This prevents multiple instances from trying to initialize the database at the same time. 
""" - engine = SessionManager.engine() + engine = self._engine_factory() with engine.begin() as connection: with pg_advisory_lock(connection, LOCK_ID_DB_INIT): self.initialize(connection) diff --git a/src/palace/manager/search/service.py b/src/palace/manager/search/service.py index d3d6e9a8cb..0d86bcbee5 100644 --- a/src/palace/manager/search/service.py +++ b/src/palace/manager/search/service.py @@ -110,10 +110,6 @@ def read_pointer_set_empty(self) -> None: def index_create(self, revision: SearchSchemaRevision) -> None: """Atomically create an index for the given base name and revision.""" - @abstractmethod - def indexes_created(self) -> list[str]: - """A log of all the indexes that have been created by this client service.""" - @abstractmethod def index_is_populated(self, revision: SearchSchemaRevision) -> bool: """Return True if the index for the given base name and revision has been populated.""" @@ -172,7 +168,6 @@ def __init__(self, client: OpenSearch, base_revision_name: str): self._search = Search(using=self._client) self._base_revision_name = base_revision_name self._multi_search = MultiSearch(using=self._client) - self._indexes_created: list[str] = [] # Documents are not allowed to automatically create indexes. 
# AWS OpenSearch only accepts the "flat" format @@ -184,9 +179,6 @@ def __init__(self, client: OpenSearch, base_revision_name: str): def base_revision_name(self) -> str: return self._base_revision_name - def indexes_created(self) -> list[str]: - return self._indexes_created - def write_pointer(self) -> SearchWritePointer | None: try: result: dict = self._client.indices.get_alias( @@ -207,7 +199,6 @@ def create_empty_index(self) -> None: index_name = self._empty(self.base_revision_name) self._logger.debug(f"creating empty index {index_name}") self._client.indices.create(index=index_name) - self._indexes_created.append(index_name) except RequestError as e: if e.error == "resource_already_exists_exception": return @@ -261,7 +252,6 @@ def index_create(self, revision: SearchSchemaRevision) -> None: index=index_name, body=revision.mapping_document().serialize(), ) - self._indexes_created.append(index_name) except RequestError as e: if e.error == "resource_already_exists_exception": return diff --git a/src/palace/manager/sqlalchemy/session.py b/src/palace/manager/sqlalchemy/session.py index 9c5f1f003d..8f9d731fcb 100644 --- a/src/palace/manager/sqlalchemy/session.py +++ b/src/palace/manager/sqlalchemy/session.py @@ -2,12 +2,11 @@ import json import logging -import os from typing import Any from pydantic.json import pydantic_encoder from sqlalchemy import create_engine, event, literal_column, select, table, text -from sqlalchemy.engine import Connection +from sqlalchemy.engine import Connection, Engine from sqlalchemy.orm import Session, sessionmaker from sqlalchemy.pool import Pool @@ -44,7 +43,9 @@ class SessionManager: RECURSIVE_EQUIVALENTS_FUNCTION = "recursive_equivalents.sql" @classmethod - def engine(cls, url: str | None = None, poolclass: type[Pool] | None = None): + def engine( + cls, url: str | None = None, poolclass: type[Pool] | None = None + ) -> Engine: url = url or Configuration.database_url() return create_engine( url, @@ -62,18 +63,8 @@ def 
setup_event_listener( return session @classmethod - def sessionmaker(cls, url=None, session=None): - if not (url or session): - url = Configuration.database_url() - if url: - bind_obj = cls.engine(url) - elif session: - bind_obj = session.get_bind() - if not os.environ.get("TESTING"): - # If a factory is being created from a session in test mode, - # use the same Connection for all of the tests so objects can - # be accessed. Otherwise, bind against an Engine object. - bind_obj = bind_obj.engine + def sessionmaker(cls): + bind_obj = cls.engine() session_factory = sessionmaker(bind=bind_obj) cls.setup_event_listener(session_factory) return session_factory diff --git a/tests/manager/conftest.py b/tests/conftest.py similarity index 100% rename from tests/manager/conftest.py rename to tests/conftest.py diff --git a/tests/fixtures/api_controller.py b/tests/fixtures/api_controller.py index 38e7542228..7b8c5de333 100644 --- a/tests/fixtures/api_controller.py +++ b/tests/fixtures/api_controller.py @@ -83,7 +83,6 @@ def __init__( self, db: DatabaseTransactionFixture, services_fixture: ServicesFixture, - setup_cm: bool, ): self.db = db self.app = app @@ -102,11 +101,10 @@ def __init__( self.search_index = ExternalSearchIndexFake() self.services_fixture.services.search.index.override(self.search_index) - if setup_cm: - # NOTE: Any reference to self._default_library below this - # point in this method will cause the tests in - # TestScopedSession to hang. - app.manager = self.circulation_manager_setup() + # NOTE: Any reference to self._default_library below this + # point in this method will cause the tests in + # TestScopedSession to hang. 
+ app.manager = self.circulation_manager_setup() @contextmanager def wired_container(self): @@ -259,19 +257,7 @@ def controller_fixture( db: DatabaseTransactionFixture, services_fixture: ServicesFixture ): time_then = datetime.datetime.now() - fixture = ControllerFixture(db, services_fixture, setup_cm=True) - time_now = datetime.datetime.now() - time_diff = time_now - time_then - logging.info("controller init took %s", time_diff) - yield fixture - - -@pytest.fixture(scope="function") -def controller_fixture_without_cm( - db: DatabaseTransactionFixture, services_fixture: ServicesFixture -): - time_then = datetime.datetime.now() - fixture = ControllerFixture(db, services_fixture, setup_cm=False) + fixture = ControllerFixture(db, services_fixture) time_now = datetime.datetime.now() time_diff = time_now - time_then logging.info("controller init took %s", time_diff) @@ -315,7 +301,7 @@ class CirculationControllerFixture(ControllerFixture): def __init__( self, db: DatabaseTransactionFixture, services_fixture: ServicesFixture ): - super().__init__(db, services_fixture, setup_cm=True) + super().__init__(db, services_fixture) self.works = [] self.add_works(self.BOOKS) diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index a363b6793e..8c65370681 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -2,30 +2,35 @@ import importlib import logging -import os import shutil import tempfile import time import uuid from collections.abc import Generator, Iterable from contextlib import contextmanager +from functools import cached_property from textwrap import dedent from typing import Any +from unittest.mock import patch import pytest import sqlalchemy from Crypto.PublicKey.RSA import import_key -from sqlalchemy import MetaData -from sqlalchemy.engine import Connection, Engine, Transaction +from pydantic import PostgresDsn +from sqlalchemy import MetaData, create_engine, text +from sqlalchemy.engine import Connection, Engine, Transaction, 
make_url from sqlalchemy.orm import Session, sessionmaker +from typing_extensions import Self from palace.manager.api.discovery.opds_registration import OpdsRegistrationService from palace.manager.core.classifier import Classifier from palace.manager.core.config import Configuration +from palace.manager.core.exceptions import BasePalaceException from palace.manager.core.opds_import import OPDSAPI from palace.manager.integration.configuration.library import LibrarySettings from palace.manager.integration.goals import Goals from palace.manager.integration.registry.discovery import DiscoveryRegistry +from palace.manager.service.configuration import ServiceConfiguration from palace.manager.sqlalchemy.constants import MediaTypes from palace.manager.sqlalchemy.model.classification import ( Classification, @@ -60,102 +65,314 @@ from palace.manager.sqlalchemy.session import SessionManager from palace.manager.sqlalchemy.util import create, get_one_or_create from palace.manager.util.datetime_helpers import utc_now +from tests.fixtures.services import ServicesFixture -class ApplicationFixture: - """The ApplicationFixture is a representation of the state that must be set up in order to run the application for - testing.""" +class TestIdFixture: + """ + This fixture creates a unique test id. This ID is suitable for initializing shared resources. + For example - database name, opensearch index name, etc. + """ - @classmethod - def drop_existing_schema(cls): - engine = SessionManager.engine() - metadata_obj = MetaData() - metadata_obj.reflect(bind=engine) - metadata_obj.drop_all(engine) - metadata_obj.clear() + def __init__(self, worker_id: str, prefix: str): + # worker_id comes from the pytest-xdist fixture + self._worker_id = worker_id + + # This flag indicates that the tests are running in parallel mode. + self.parallel = worker_id != "master" + + # We create a unique run id for each test run. 
The dashes are + # replaced with underscores to make it a valid identifier for + # in PostgreSQL. + self.run_id = str(uuid.uuid4()).replace("-", "_") + + self._prefix = prefix + + @cached_property + def id(self) -> str: + # The test id is a combination of the prefix, worker id and run id. + # This is the ID that should be used to create unique resources. + return f"{self._prefix}_{self._worker_id}_{self.run_id}" + + +@pytest.fixture(scope="session") +def session_test_id(worker_id: str) -> TestIdFixture: + """ + This is a session scoped fixture that provides a unique test id. Since session scoped fixtures + are created only once per worker, per test run, this fixture provides a unique test ID that is + stable for the worker for the entire test run. + + This is useful when initializing session scoped shared resources like databases, opensearch indexes, etc. + """ + return TestIdFixture(worker_id, "session") + + +@pytest.fixture(scope="function") +def function_test_id(worker_id: str) -> TestIdFixture: + """ + This is a function scoped fixture that provides a unique test id. Since function scoped fixtures + are created for each test function, this fixture provides a unique test ID that for each test function. + + This is useful when initializing function scoped shared resources. + """ + + return TestIdFixture(worker_id, "function") + + +class DatabaseTestConfiguration(ServiceConfiguration): + url: PostgresDsn + create_database: bool = True + + class Config: + env_prefix = "PALACE_TEST_DATABASE_" + + +class DatabaseCreationFixture: + """ + Uses the configured database URL to create a unique database for each test run. The database + is dropped after the test run is complete. + + Database creation can be disabled by setting the `create_database` flag to False in the configuration. + In this case the database URL is used as is. 
+ """ + + def __init__(self, test_id: TestIdFixture): + self.test_id = test_id + config = DatabaseTestConfiguration() + if not config.create_database and self.test_id.parallel: + raise BasePalaceException( + "Database creation is disabled, but tests are running in parallel mode. " + "This is not supported. Please enable database creation or run tests in serial mode." + ) + self.create_database = config.create_database + self._config_url = make_url(config.url) + + @cached_property + def database_name(self) -> str: + """ + Returns the name of the database that the test should use. + """ + + if not self.create_database: + if self._config_url.database is None: + raise BasePalaceException( + "Database name is required when database creation is disabled." + ) + return self._config_url.database + + return self.test_id.id + + @cached_property + def url(self) -> str: + """ + Returns the Postgres URL for the database that the test should use. This URL + includes credentials and the database name, so it has everything needed to + connect to the database. + """ + + return str(self._config_url.set(database=self.database_name)) + + @contextmanager + def _db_connection(self) -> Generator[Connection, None, None]: + """ + Databases need to be created and dropped outside a transaction. This method + provides a connection to database URL provided in the configuration that is not + wrapped in a transaction. 
+ """ + + engine = create_engine(self._config_url, isolation_level="AUTOCOMMIT") + connection = engine.connect() + try: + yield connection + finally: + connection.close() + engine.dispose() + + def _create_db(self) -> None: + if not self.create_database: + return + + with self._db_connection() as connection: + user = self._config_url.username + connection.execute(text(f"CREATE DATABASE {self.database_name}")) + connection.execute( + text(f"GRANT ALL PRIVILEGES ON DATABASE {self.database_name} TO {user}") + ) + + def _drop_db(self) -> None: + if not self.create_database: + return + + with self._db_connection() as connection: + connection.execute(text(f"DROP DATABASE {self.database_name}")) + + @contextmanager + def patch_database_url(self) -> Generator[None, None, None]: + """ + This method patches the database URL, so any code that uses it will use the worker specific database. + """ + with patch.object(Configuration, "database_url", return_value=self.url): + yield @classmethod - def create(cls): - # This will make sure we always connect to the test database. - os.environ["TESTING"] = "true" + @contextmanager + def fixture(cls, test_id: TestIdFixture) -> Generator[Self, None, None]: + db_name_fixture = cls(test_id) + db_name_fixture._create_db() + try: + yield db_name_fixture + finally: + db_name_fixture._drop_db() - # Drop any existing schema. It will be recreated when the database is initialized. - _cls = cls() - _cls.drop_existing_schema() - return _cls - def close(self): - if "TESTING" in os.environ: - del os.environ["TESTING"] +@pytest.fixture(scope="session") +def database_creation( + session_test_id: TestIdFixture, +) -> Generator[DatabaseCreationFixture, None, None]: + """ + This is a session scoped fixture that provides a unique database for each worker in the test run. 
+ """ + with DatabaseCreationFixture.fixture(session_test_id) as fixture: + yield fixture + + +@pytest.fixture(scope="function") +def function_database_creation( + function_test_id: TestIdFixture, +) -> Generator[DatabaseCreationFixture, None, None]: + """ + This is a function scoped fixture that provides a unique database for each test function. + + This is resource intensive, so it should only be used when necessary. It is helpful + when testing database interactions that cannot be tested in a transaction. Such as + instance initialization, schema migrations, etc. + """ + with DatabaseCreationFixture.fixture(function_test_id) as fixture: + if not fixture.create_database: + raise BasePalaceException( + "Cannot provide a function scoped database when database creation is disabled." + ) + with fixture.patch_database_url(): + yield fixture class DatabaseFixture: - """The DatabaseFixture stores a reference to the database.""" + """ + The DatabaseFixture initializes the database schema and creates a connection to the database + that should be used in the tests. 
+ """ - _engine: Engine - _connection: Connection + def __init__(self, database_name: DatabaseCreationFixture) -> None: + self.database_name = database_name + self.engine = self.engine_factory() + self.connection = self.engine.connect() - def __init__(self, engine: Engine, connection: Connection): - self._engine = engine - self._connection = connection + def engine_factory(self) -> Engine: + return SessionManager.engine(self.database_name.url) - @staticmethod - def _get_database_connection() -> tuple[Engine, Connection]: - url = Configuration.database_url() - engine = SessionManager.engine(url) - connection = engine.connect() - return engine, connection + def drop_existing_schema(self) -> None: + metadata_obj = MetaData() + metadata_obj.reflect(bind=self.engine) + metadata_obj.drop_all(self.engine) + metadata_obj.clear() - @staticmethod - def _initialize_database(connection: Connection): - SessionManager.initialize_schema(connection) - with Session(connection) as session: + def _initialize_database(self) -> None: + SessionManager.initialize_schema(self.connection) + with Session(self.connection) as session: # Initialize the database with default data SessionManager.initialize_data(session) @staticmethod - def _load_core_model_classes(): - # Load all the core model classes so that they are registered with the ORM. + def _load_model_classes(): + """ + Make sure that all the model classes are loaded, so that they are registered with the + ORM when we are creating the schema. + """ import palace.manager.sqlalchemy.model importlib.reload(palace.manager.sqlalchemy.model) + def _close(self): + # Destroy the database connection and engine. + self.connection.close() + self.engine.dispose() + + @contextmanager + def patch_engine(self) -> Generator[None, None, None]: + """ + This method patches the SessionManager to use the engine provided by this fixture. + + This is useful when the tests need to access the engine directly. 
It patches the SessionManager + to use the engine provided by this fixture and patches the engine so that code that calls + dispose() on the engine does not actually dispose it, since it is used by this fixture. + """ + with patch.object(self.engine, "dispose"), patch.object( + SessionManager, "engine", return_value=self.engine + ): + yield + + @contextmanager + def patch_engine_error(self) -> Generator[None, None, None]: + """ + This method patches the SessionManager to raise an exception when the engine is accessed. + This is useful when the tests should not be accessing the engine directly. + """ + with patch.object( + SessionManager, + "engine", + side_effect=BasePalaceException( + "Engine needs to be accessed from fixture within tests." + ), + ): + yield + @classmethod - def create(cls) -> DatabaseFixture: - cls._load_core_model_classes() - engine, connection = cls._get_database_connection() - cls._initialize_database(connection) - return DatabaseFixture(engine, connection) + @contextmanager + def fixture( + cls, database_name: DatabaseCreationFixture + ) -> Generator[Self, None, None]: + db_fixture = cls(database_name) + db_fixture.drop_existing_schema() + db_fixture._load_model_classes() + db_fixture._initialize_database() + try: + yield db_fixture + finally: + db_fixture._close() - def close(self): - # Destroy the database connection and engine. - self._connection.close() - self._engine.dispose() - @property - def connection(self) -> Connection: - return self._connection +@pytest.fixture(scope="session") +def database( + database_creation: DatabaseCreationFixture, +) -> Generator[DatabaseFixture, None, None]: + """ + This is a session scoped fixture that provides a unique database engine and connection + for each worker in the test run. 
+ """ + with DatabaseFixture.fixture(database_creation) as db: + yield db + + +@pytest.fixture(scope="function") +def function_database( + function_database_creation: DatabaseCreationFixture, +) -> Generator[DatabaseFixture, None, None]: + """ + This is a function scoped fixture that provides a unique database engine and connection + for each test. This is resource intensive, so it should only be used when necessary. + """ + with DatabaseFixture.fixture(function_database_creation) as db: + with db.patch_engine_error(): + yield db class DatabaseTransactionFixture: """A fixture representing a single transaction. The transaction is automatically rolled back.""" - _database: DatabaseFixture - _default_library: Library | None - _default_collection: Collection | None - _session: Session - _transaction: Transaction - _counter: int - _isbns: list[str] - - def __init__( - self, database: DatabaseFixture, session: Session, transaction: Transaction - ): + def __init__(self, database: DatabaseFixture, services: ServicesFixture): self._database = database - self._session = session - self._transaction = transaction - self._default_library = None - self._default_collection = None + self._default_library: Library | None = None + self._default_collection: Collection | None = None self._counter = 2000 self._isbns = [ "9780674368279", @@ -163,6 +380,9 @@ def __init__( "9781936460236", "9780316075978", ] + self._services = services + self._session = SessionManager.session_from_connection(database.connection) + self._transaction = database.connection.begin_nested() def _make_default_library(self) -> Library: """Ensure that the default library exists in the given database.""" @@ -176,15 +396,18 @@ def _make_default_library(self) -> Library: collection.libraries.append(library) return library - @staticmethod - def create(database: DatabaseFixture) -> DatabaseTransactionFixture: - # Create a new connection to the database. 
- session = SessionManager.session_from_connection(database.connection) - - transaction = database.connection.begin_nested() - return DatabaseTransactionFixture(database, session, transaction) - - def close(self): + @classmethod + @contextmanager + def fixture( + cls, database: DatabaseFixture, services: ServicesFixture + ) -> Generator[Self, None, None]: + db = cls(database, services) + try: + yield db + finally: + db._close() + + def _close(self): # Close the session. self._session.close() @@ -871,6 +1094,20 @@ def credential(self, data_source_name=DataSource.GUTENBERG, type=None, patron=No return credential +@pytest.fixture(scope="function") +def db( + database_creation: DatabaseCreationFixture, + database: DatabaseFixture, + services_fixture: ServicesFixture, +) -> Generator[DatabaseTransactionFixture, None, None]: + with DatabaseTransactionFixture.fixture(database, services_fixture) as db: + with ( + database.patch_engine_error(), + database_creation.patch_database_url(), + ): + yield db + + class TemporaryDirectoryConfigurationFixture: """A fixture that configures the Configuration system to use a temporary directory. 
The directory is cleaned up when the fixture is closed.""" @@ -906,29 +1143,6 @@ def temporary_directory_configuration() -> ( fix.close() -@pytest.fixture(scope="session") -def application() -> Iterable[ApplicationFixture]: - app = ApplicationFixture.create() - yield app - app.close() - - -@pytest.fixture(scope="session") -def database(application: ApplicationFixture) -> Iterable[DatabaseFixture]: - db = DatabaseFixture.create() - yield db - db.close() - - -@pytest.fixture(scope="function") -def db( - database: DatabaseFixture, -) -> Generator[DatabaseTransactionFixture, None, None]: - tr = DatabaseTransactionFixture.create(database) - yield tr - tr.close() - - class IntegrationConfigurationFixture: def __init__(self, db: DatabaseTransactionFixture): self.db = db diff --git a/tests/fixtures/search.py b/tests/fixtures/search.py index cbb4256c18..5626fa7fe6 100644 --- a/tests/fixtures/search.py +++ b/tests/fixtures/search.py @@ -1,6 +1,7 @@ from __future__ import annotations from collections.abc import Generator +from contextlib import contextmanager import pytest from opensearchpy import OpenSearch @@ -14,14 +15,13 @@ from palace.manager.service.search.container import Search from palace.manager.sqlalchemy.model.work import Work from palace.manager.util.log import LoggerMixin -from tests.fixtures.database import DatabaseTransactionFixture +from tests.fixtures.database import DatabaseTransactionFixture, TestIdFixture from tests.fixtures.services import ServicesFixture from tests.mocks.search import SearchServiceFake class SearchTestConfiguration(ServiceConfiguration): url: AnyHttpUrl - index_prefix: str = "test_index" timeout: int = 20 maxsize: int = 25 @@ -38,41 +38,31 @@ class ExternalSearchFixture(LoggerMixin): to ensure that it works well overall, with a realistic index. 
""" - def __init__(self, db: DatabaseTransactionFixture, services: Services): + def __init__( + self, db: DatabaseTransactionFixture, services: Services, test_id: TestIdFixture + ): self.search_config = SearchTestConfiguration() self.services_container = services + self.index_prefix = test_id.id # Set up our testing search instance in the services container self.search_container = Search() - self.search_container.config.from_dict(self.search_config.dict()) + search_config_dict = self.search_config.dict() + search_config_dict["index_prefix"] = self.index_prefix + self.search_container.config.from_dict(search_config_dict) self.services_container.search.override(self.search_container) - self._indexes_created: list[str] = [] self.db = db self.client: OpenSearch = services.search.client() self.service: SearchServiceOpensearch1 = services.search.service() self.index: ExternalSearchIndex = services.search.index() - self._indexes_created = [] # Make sure the services container is wired up with the newly created search container wire_container(self.services_container) - def record_index(self, name: str): - self.log.info(f"Recording index {name} for deletion") - self._indexes_created.append(name) - def close(self): - for index in self._indexes_created: - try: - self.log.info(f"Deleting index {index}") - self.client.indices.delete(index) - except Exception as e: - self.log.info(f"Failed to delete index {index}: {e}") - - # Force test index deletion - self.client.indices.delete("test_index*") - self.log.info("Waiting for operations to complete.") - self.client.indices.refresh() + # Delete our index prefix + self.client.indices.delete(f"{self.index_prefix}*") # Unwire the services container self.services_container.unwire() @@ -90,19 +80,30 @@ def default_work(self, *args, **kwargs): work.set_presentation_ready() return work - def init_indices(self): - self.index.initialize_indices() + @classmethod + @contextmanager + def fixture( + cls, db: DatabaseTransactionFixture, services: 
Services, test_id: TestIdFixture + ): + fixture = cls(db, services, test_id) + try: + yield fixture + finally: + fixture.close() @pytest.fixture(scope="function") def external_search_fixture( - db: DatabaseTransactionFixture, services_fixture: ServicesFixture + db: DatabaseTransactionFixture, + services_fixture: ServicesFixture, + function_test_id: TestIdFixture, ) -> Generator[ExternalSearchFixture, None, None]: """Ask for an external search system.""" """Note: You probably want EndToEndSearchFixture instead.""" - fixture = ExternalSearchFixture(db, services_fixture.services) - yield fixture - fixture.close() + with ExternalSearchFixture.fixture( + db, services_fixture.services, function_test_id + ) as fixture: + yield fixture class EndToEndSearchFixture: @@ -237,9 +238,11 @@ def _compare_hits(self, expect, hits, query_args, should_be_ordered=True, **kwar count = self.external_search_index.count_works(filter) assert count == len(expect) - def close(self): - for index in self.external_search_index.search_service().indexes_created(): - self.external_search.record_index(index) + @classmethod + @contextmanager + def fixture(cls, search_fixture: ExternalSearchFixture): + fixture = cls(search_fixture) + yield fixture @pytest.fixture(scope="function") @@ -247,9 +250,8 @@ def end_to_end_search_fixture( external_search_fixture: ExternalSearchFixture, ) -> Generator[EndToEndSearchFixture, None, None]: """Ask for an external search system that can be populated with data for end-to-end tests.""" - fixture = EndToEndSearchFixture(external_search_fixture) - yield fixture - fixture.close() + with EndToEndSearchFixture.fixture(external_search_fixture) as fixture: + yield fixture class ExternalSearchFixtureFake: @@ -269,15 +271,20 @@ def close(self): self.services.unwire() self.services.search.reset_override() + @classmethod + @contextmanager + def fixture(cls, db: DatabaseTransactionFixture, services: Services): + fixture = cls(db, services) + try: + yield fixture + finally: + 
fixture.close() + @pytest.fixture(scope="function") def external_search_fake_fixture( db: DatabaseTransactionFixture, services_fixture: ServicesFixture ) -> Generator[ExternalSearchFixtureFake, None, None]: """Ask for an external search system that can be populated with data for end-to-end tests.""" - fixture = ExternalSearchFixtureFake( - db=db, - services=services_fixture.services, - ) - yield fixture - fixture.close() + with ExternalSearchFixtureFake.fixture(db, services_fixture.services) as fixture: + yield fixture diff --git a/tests/fixtures/services.py b/tests/fixtures/services.py index 86603468f7..a38030bb58 100644 --- a/tests/fixtures/services.py +++ b/tests/fixtures/services.py @@ -201,7 +201,7 @@ def wired(self) -> Generator[None, None, None]: self.services.unwire() -@pytest.fixture(autouse=True) +@pytest.fixture def services_fixture( services_logging_fixture: ServicesLoggingFixture, services_storage_fixture: ServicesStorageFixture, diff --git a/tests/fixtures/webserver.py b/tests/fixtures/webserver.py index f1744a3b76..d06814d9b6 100644 --- a/tests/fixtures/webserver.py +++ b/tests/fixtures/webserver.py @@ -124,12 +124,12 @@ class MockAPIServer(LoggerMixin): def __init__(self, address: str, port: int): self._address = address - self._port = port self._server = MockAPIInternalServer( - (self._address, self._port), bind_and_activate=True + (self._address, port), bind_and_activate=True ) self._server.mock_api_server = self self._server_thread = threading.Thread(target=self._server.serve_forever) + self._port = self._server.socket.getsockname()[1] self._responses = {} self._requests = [] @@ -180,7 +180,7 @@ def requests(self) -> list[MockAPIServerRequest]: @pytest.fixture def mock_web_server() -> Generator[MockAPIServer, None, None]: """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) + _server = MockAPIServer("127.0.0.1", 0) _server.start() yield _server _server.stop() diff 
--git a/tests/manager/api/controller/test_profile.py b/tests/manager/api/controller/test_profile.py index ac04f3989a..467c84676f 100644 --- a/tests/manager/api/controller/test_profile.py +++ b/tests/manager/api/controller/test_profile.py @@ -17,7 +17,7 @@ class ProfileFixture(ControllerFixture): def __init__( self, db: DatabaseTransactionFixture, services_fixture: ServicesFixture ): - super().__init__(db, services_fixture, setup_cm=True) + super().__init__(db, services_fixture) # Nothing will happen to this patron. This way we can verify # that a patron can only see/modify their own profile. self.other_patron = db.patron() diff --git a/tests/manager/api/controller/test_scopedsession.py b/tests/manager/api/controller/test_scopedsession.py index 1cc62ea29f..b07e68493a 100644 --- a/tests/manager/api/controller/test_scopedsession.py +++ b/tests/manager/api/controller/test_scopedsession.py @@ -1,68 +1,62 @@ +from collections.abc import Generator from contextlib import contextmanager +from unittest.mock import MagicMock import flask +import pytest from sqlalchemy.orm import Session +from typing_extensions import Self -from palace.manager import api -from palace.manager.core.opds_import import OPDSAPI +from palace.manager.api.app import app, initialize_database from palace.manager.sqlalchemy.flask_sqlalchemy_session import current_session -from palace.manager.sqlalchemy.model.collection import Collection from palace.manager.sqlalchemy.model.datasource import DataSource from palace.manager.sqlalchemy.model.identifier import Identifier -from palace.manager.sqlalchemy.model.library import Library -from palace.manager.sqlalchemy.util import create -from tests.fixtures.api_controller import ( - ControllerFixture, - ControllerFixtureSetupOverrides, -) - - -class ScopedHolder: - """A scoped holder used to store some state in the test. 
This is necessary because - we want to do some unusual things with scoped sessions, and don't necessary have access - to a database transaction fixture in all of the various methods that will be called. - """ +from tests.fixtures.database import DatabaseFixture +from tests.fixtures.services import ServicesFixture +from tests.mocks.circulation import MockCirculationManager + + +class ScopedSessionFixture: + def __init__( + self, db_fixture: DatabaseFixture, services: ServicesFixture, session: Session + ): + self.db_fixture = db_fixture + self.session = session + self.services = services + self.app = app + with db_fixture.patch_engine(): + initialize_database() + self.app.manager = MockCirculationManager(app._db, services.services) + self.mock_library = MagicMock() + self.mock_library.has_root_lanes = False + + def _cleanup(self) -> None: + delattr(self.app, "manager") + delattr(self.app, "_db") + + @classmethod + @contextmanager + def fixture( + cls, db_fixture: DatabaseFixture, services_fixture: ServicesFixture + ) -> Generator[Self, None, None]: + with Session(db_fixture.connection) as session: + fixture = cls(db_fixture, services_fixture, session) + yield fixture + fixture._cleanup() - def __init__(self): - self.identifiers = 0 - - def fresh_id(self) -> str: - self.identifiers = self.identifiers + 1 - return str(self.identifiers) - - def make_default_libraries(self, session: Session): - libraries = [] - for i in range(2): - name = self.fresh_id() + " (library for scoped session)" - library, ignore = create( - session, - Library, - short_name=name, - public_key="x", - private_key=b"y", - settings_dict={ - "website": "https://library.com", - "help_web": "https://library.com/help", - }, - ) - libraries.append(library) - return libraries - - def make_default_collection(self, session: Session, library): - """We need to create a test collection that - uses the scoped session. 
- """ - collection, _ = Collection.by_name_and_protocol( - session, - self.fresh_id() + " (collection for scoped session)", - OPDSAPI.label(), - ) - settings = OPDSAPI.settings_class()( - external_account_id="http://url.com", data_source="OPDS" - ) - OPDSAPI.settings_update(collection.integration_configuration, settings) - library.collections.append(collection) - return collection + @contextmanager + def request_context(self, path: str) -> Generator[None, None, None]: + with self.app.test_request_context(path) as ctx: + ctx.request.library = self.mock_library # type: ignore[attr-defined] + yield + + +@pytest.fixture +def scoped_session_fixture( + function_database: DatabaseFixture, services_fixture: ServicesFixture +) -> Generator[ScopedSessionFixture, None, None]: + with ScopedSessionFixture.fixture(function_database, services_fixture) as fixture: + yield fixture class TestScopedSession: @@ -74,45 +68,17 @@ class TestScopedSession: the corresponding behavior in unit tests. """ - @contextmanager - def request_context_and_transaction( + def test_scoped_session( self, - scoped: ScopedHolder, - controller_fixture_without_cm: ControllerFixture, - *args + scoped_session_fixture: ScopedSessionFixture, ): - """Run a simulated Flask request in a transaction that gets rolled - back at the end of the request. 
- """ - - fixture = controller_fixture_without_cm - with fixture.app.test_request_context(*args) as ctx: - transaction = current_session.begin_nested() # type: ignore[attr-defined] - fixture.app.manager = fixture.circulation_manager_setup_with_session( - session=current_session, # type: ignore[arg-type] - overrides=ControllerFixtureSetupOverrides( - make_default_libraries=scoped.make_default_libraries, - make_default_collection=scoped.make_default_collection, - ), - ) - yield ctx - transaction.rollback() - - def test_scoped_session(self, controller_fixture_without_cm: ControllerFixture): - fixture = controller_fixture_without_cm - api.app.initialize_database() - - # Create a holder that carries some state for the purposes of testing - scoped = ScopedHolder() - # Start a simulated request to the Flask app server. - with self.request_context_and_transaction(scoped, fixture, "/"): + with scoped_session_fixture.request_context("/"): # Each request is given its own database session distinct - # from the one used by most unit tests or the one - # associated with the CirculationManager object. + # from the one used by most unit tests and the one created + # outside of this context. session1 = current_session() - assert session1 != fixture.db - assert session1 != fixture.app.manager._db + assert session1 != scoped_session_fixture.session # Add an Identifier to the database. identifier = Identifier(type=DataSource.GUTENBERG, identifier="1024") @@ -124,23 +90,22 @@ def test_scoped_session(self, controller_fixture_without_cm: ControllerFixture): [identifier] = session1.query(Identifier).all() assert "1024" == identifier.identifier - # It doesn't show up in fixture.db.session, the database session - # used by most other unit tests, because it was created - # within the (still-active) context of a Flask request, - # which happens within a nested database transaction. 
- assert [] == fixture.db.session.query(Identifier).all() + # It doesn't show up in a different session because + # the request is still in progress so its transaction + # hasn't been committed. + assert [] == scoped_session_fixture.session.query(Identifier).all() # It shows up in the flask_scoped_session object that # created the request-scoped session, because within the # context of a request, running database queries on that object # actually runs them against your request-scoped session. - [identifier] = fixture.app.manager._db.query(Identifier).all() + [identifier] = app.manager._db.query(Identifier).all() assert "1024" == identifier.identifier # We use the session context manager here to make sure # we don't keep a transaction open for this new session # once we are done with it. - with fixture.app.manager._db.session_factory() as new_session: + with app.manager._db.session_factory() as new_session: # But if we were to use flask_scoped_session to create a # brand new session, it would not see the Identifier, # because it's running in a different database session. @@ -149,44 +114,32 @@ def test_scoped_session(self, controller_fixture_without_cm: ControllerFixture): # When the index controller runs in the request context, # it doesn't store anything that's associated with the # scoped session. - flask.request.library = fixture.library # type: ignore - response = fixture.app.manager.index_controller() + response = app.manager.index_controller() assert 302 == response.status_code # Once we exit the context of the Flask request, the - # transaction is rolled back. The Identifier never actually - # enters the database. - # - # If it did enter the database, it would never leave. Changes - # that happen through self._db happen inside a nested - # transaction which is rolled back after the test is over. - # But changes that happen through a session-scoped database - # connection are actually written to the database when we - # leave the scope of the request. 
- # - # To avoid this, we use test_request_context_and_transaction - # to create a nested transaction that's rolled back just - # before we leave the scope of the request. - assert [] == fixture.db.session.query(Identifier).all() + # transaction is committed and the Identifier is written to the + # database. That is why we run this test with the function_database + # fixture, which gives us a function scoped database to work with. + # This database is removed after the test completes, so we don't + # have to worry about cleaning up the database after ourselves. + [identifier] = scoped_session_fixture.session.query(Identifier).all() + assert "1024" == identifier.identifier # Now create a different simulated Flask request - with self.request_context_and_transaction(scoped, fixture, "/"): + with scoped_session_fixture.request_context("/"): session2 = current_session() - assert session2 != fixture.db - assert session2 != fixture.app.manager._db + assert session2 != scoped_session_fixture.session + assert session2 != app.manager._db # The controller still works in the new request context - # nothing it needs is associated with the previous scoped # session. - flask.request.library = fixture.library # type: ignore - response = fixture.app.manager.index_controller() + flask.request.library = scoped_session_fixture.mock_library # type: ignore[attr-defined] + response = app.manager.index_controller() assert 302 == response.status_code # The two Flask requests got different sessions, neither of # which is the same as self._db, the unscoped database session # used by most other unit tests. assert session1 != session2 - - # Make sure that we close the connections for the scoped sessions. 
- session1.bind.dispose() - session2.bind.dispose() diff --git a/tests/manager/api/test_app.py b/tests/manager/api/test_app.py index b18f993147..d7d79874f1 100644 --- a/tests/manager/api/test_app.py +++ b/tests/manager/api/test_app.py @@ -10,9 +10,14 @@ def test_initialize_application_http( db: DatabaseTransactionFixture, services_fixture_wired: ServicesFixture ): # Use the db transaction fixture so that we don't use the production settings by mistake - with patch.object( - HTTP, "set_quick_failure_settings" - ) as mock_set_quick_failure_settings: + with ( + patch.object( + HTTP, "set_quick_failure_settings" + ) as mock_set_quick_failure_settings, + patch("palace.manager.api.app.initialize_database"), + patch("palace.manager.api.app.initialize_circulation_manager"), + patch("palace.manager.api.app.app"), + ): # Initialize the app, which will set the HTTP configuration initialize_application() diff --git a/tests/manager/api/test_scripts.py b/tests/manager/api/test_scripts.py index e3b8041cfc..afc26fea71 100644 --- a/tests/manager/api/test_scripts.py +++ b/tests/manager/api/test_scripts.py @@ -541,18 +541,18 @@ class TestInstanceInitializationScript: def test_run_locks_database(self, db: DatabaseTransactionFixture): # The script locks the database with a PostgreSQL advisory lock - with patch("palace.manager.scripts.SessionManager") as session_manager: - with patch("palace.manager.scripts.pg_advisory_lock") as advisory_lock: - script = InstanceInitializationScript() - script.initialize = MagicMock() - script.run() - - advisory_lock.assert_called_once_with( - session_manager.engine().begin().__enter__(), - LOCK_ID_DB_INIT, - ) - advisory_lock().__enter__.assert_called_once() - advisory_lock().__exit__.assert_called_once() + with patch("palace.manager.scripts.pg_advisory_lock") as advisory_lock: + mock_engine_factory = MagicMock() + script = InstanceInitializationScript(engine_factory=mock_engine_factory) + script.initialize = MagicMock() + script.run() + + 
advisory_lock.assert_called_once_with( + mock_engine_factory().begin().__enter__(), + LOCK_ID_DB_INIT, + ) + advisory_lock().__enter__.assert_called_once() + advisory_lock().__exit__.assert_called_once() def test_initialize(self, db: DatabaseTransactionFixture): # Test that the script inspects the database and initializes or migrates the database @@ -738,7 +738,7 @@ def export(self, _db, start, end): output = StringIO() cmd_args = ["--start=20190820", "--end=20190827"] exporter = MockLocalAnalyticsExporter() - script = LocalAnalyticsExportScript() + script = LocalAnalyticsExportScript(_db=db.session) script.do_run(output=output, cmd_args=cmd_args, exporter=exporter) assert "test" == output.getvalue() assert ["20190820", "20190827"] == exporter.called_with @@ -746,8 +746,8 @@ def export(self, _db, start, end): class TestGenerateShortTokenScript: @pytest.fixture - def script(self): - return GenerateShortTokenScript() + def script(self, db: DatabaseTransactionFixture): + return GenerateShortTokenScript(_db=db.session) @pytest.fixture def output(self): diff --git a/tests/manager/core/test_config.py b/tests/manager/core/test_config.py new file mode 100644 index 0000000000..1a3dcca8e2 --- /dev/null +++ b/tests/manager/core/test_config.py @@ -0,0 +1,41 @@ +import pytest +from sqlalchemy.exc import ArgumentError + +from palace.manager.core.config import CannotLoadConfiguration, Configuration +from palace.manager.service.logging.configuration import LogLevel + + +class TestConfiguration: + def test_database_url( + self, monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture + ): + # If the environment variable is not set, an exception is raised. + monkeypatch.delenv( + Configuration.DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE, raising=False + ) + with pytest.raises(CannotLoadConfiguration) as exc1: + Configuration.database_url() + assert "Database URL was not defined" in str(exc1.value) + + # If the URL is not in the expected format, an exception is raised. 
+ monkeypatch.setenv( + Configuration.DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE, "bad-url" + ) + with pytest.raises(ArgumentError) as exc2: + Configuration.database_url() + assert "Bad format for database URL" in str(exc2.value) + + # Make sure database_url() returns the expected URL. + caplog.set_level(LogLevel.info) + expected_url = "postgresql://user:pass@palaceproject.io:1234/test_db" + monkeypatch.setenv( + Configuration.DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE, expected_url + ) + assert Configuration.database_url() == expected_url + assert "Connecting to database" in caplog.text + + # Make sure the password is hidden in the log. + assert "pass" not in caplog.text + + # But the rest of the URL is visible. + assert expected_url.replace("pass", "***") in caplog.text diff --git a/tests/manager/search/test_external_search.py b/tests/manager/search/test_external_search.py index d3a41e2144..8f6cd1a45d 100644 --- a/tests/manager/search/test_external_search.py +++ b/tests/manager/search/test_external_search.py @@ -1979,12 +1979,6 @@ def works(worklist, facets): debug=True, ) - def assert_featured(description, worklist, facets, expect): - # Generate a list of featured works for the given `worklist` - # and compare that list against `expect`. - actual = works(worklist, facets) - fixture.assert_works(description, expect, actual) - worklist = WorkList() worklist.initialize(transaction.default_library()) facets = FeaturedFacets(1, random_seed=Filter.DETERMINISTIC) @@ -2006,12 +2000,11 @@ def assert_featured(description, worklist, facets, expect): ) # The featured work appears above the non-featured work, # even though it's lower quality and is not available. - assert_featured( - "Works from WorkList based on CustomList", - best_sellers, - facets, - [data.featured_on_list, data.not_featured_on_list], - ) + expect = [data.featured_on_list, data.not_featured_on_list] + # Generate a list of featured works for the given `worklist` + # and compare that list against `expect`. 
+ actual = works(best_sellers, facets) + fixture.assert_works("Works from WorkList based on CustomList", expect, actual) # By changing the minimum_featured_quality you can control # at what point a work is considered 'featured' -- at which @@ -2040,34 +2033,45 @@ def assert_featured(description, worklist, facets, expect): assert data.featured_on_list in last_two # Up to this point we've been avoiding the random element, - # but we can introduce that now by passing in a numeric seed. - # In normal usage, the current time is used as the seed. + # but we can introduce that now by letting the random_seed + # parameter be None. # # The random element is relatively small, so it mainly acts # to rearrange works whose scores were similar before. # - # The order of the works when using random depends on 4 things: - # - The seed - # - The id (work_id) - # - The index name - # - The shard id - # If any of those change the order of works in this result may change, - # and hence the order of works in this assert must also change - # E.g. If the index version changes from v5 to v6, this may affect the order of works queried - # Keeping everything else the same, the order of works will remain reproducible across test runs - random_facets = FeaturedFacets(1, random_seed=43) - assert_featured( - "Works permuted by a random seed", - worklist, - random_facets, - [ - data.hq_available_2, - data.hq_available, - data.default_quality, - data.hq_not_available, - data.not_featured_on_list, - data.featured_on_list, - ], + # In this test we have two groups of works -- one group is + # high-quality and available, the other group is low-quality. + # With the random element in play, the high-quality works will + # be randomly permuted among themselves, and the low-quality + # works will be randomly permuted among themselves. + # However, the high-quality works will always show up before + # the low-quality works. 
+ random_facets = FeaturedFacets(1) + expect_high_quality = [ + data.hq_available_2, + data.hq_available, + ] + expect_low_quality = [ + data.default_quality, + data.hq_not_available, + data.not_featured_on_list, + data.featured_on_list, + ] + # Generate a list of featured works for the given `worklist` + # and compare that list against `expect`. + actual_random = works(worklist, random_facets) + assert len(actual_random) == 6 + fixture.assert_works( + "Works permuted by a random seed (high quality)", + expect_high_quality, + actual_random[:2], + should_be_ordered=False, + ) + fixture.assert_works( + "Works permuted by a random seed (low quality)", + expect_low_quality, + actual_random[2:], + should_be_ordered=False, ) diff --git a/tests/manager/search/test_migration_states.py b/tests/manager/search/test_migration_states.py index 12a5cbf22d..1f2e43232d 100644 --- a/tests/manager/search/test_migration_states.py +++ b/tests/manager/search/test_migration_states.py @@ -29,32 +29,24 @@ def test_initial_migration_case( self, external_search_fixture: ExternalSearchFixture ): fx = external_search_fixture - db = fx.db index = fx.index service = fx.service - # Ensure we are in the initial state, no test indices and pointer available - prefix = fx.search_config.index_prefix - all_indices = fx.client.indices.get("*") - for index_name in all_indices.keys(): - if prefix in index_name: - fx.client.indices.delete(index_name) - - # We cannot make any requests before we intitialize + # We cannot make any requests before we initialize with pytest.raises(Exception) as raised: index.query_works("") assert "index_not_found" in str(raised.value) - # When a new sytem comes up the first code to run is the InstanceInitailization script + # When a new sytem comes up the first code to run is the InstanceInitialization script # This preps the DB and the search indices/pointers InstanceInitializationScript().initialize_search_indexes() # Ensure we have created the index and pointers - 
new_index_name = index._revision.name_for_index(prefix) - empty_index_name = service._empty(prefix) + new_index_name = index._revision.name_for_index(fx.index_prefix) + empty_index_name = service._empty(fx.index_prefix) all_indices = fx.client.indices.get("*") - assert prefix in new_index_name + assert fx.index_prefix in new_index_name assert new_index_name in all_indices.keys() assert empty_index_name in all_indices.keys() assert fx.client.indices.exists_alias( diff --git a/tests/manager/search/test_service.py b/tests/manager/search/test_service.py index d0195dacd1..46117fa145 100644 --- a/tests/manager/search/test_service.py +++ b/tests/manager/search/test_service.py @@ -2,7 +2,6 @@ from palace.manager.search.document import LONG, SearchMappingDocument from palace.manager.search.revision import SearchSchemaRevision -from palace.manager.search.service import SearchServiceOpensearch1 from tests.fixtures.search import ExternalSearchFixture @@ -18,9 +17,6 @@ def mapping_document(self) -> SearchMappingDocument: return self.document -BASE_NAME = "base" - - class TestService: """ Tests to verify that the Opensearch service implementation has the semantics we expect. @@ -30,89 +26,75 @@ def test_create_empty_idempotent( self, external_search_fixture: ExternalSearchFixture ): """Creating the empty index is idempotent.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service service.create_empty_index() - - # Log the index so that the fixture cleans it up afterward. 
- external_search_fixture.record_index("base-empty") - service.create_empty_index() indices = external_search_fixture.client.indices.client.indices assert indices is not None - assert indices.exists("base-empty") + assert indices.exists(f"{external_search_fixture.index_prefix}-empty") def test_create_index_idempotent( self, external_search_fixture: ExternalSearchFixture ): """Creating any index is idempotent.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service revision = BasicMutableRevision(23) service.index_create(revision) service.index_create(revision) - # Log the index so that the fixture cleans it up afterward. - external_search_fixture.record_index("base-v23") - indices = external_search_fixture.client.indices.client.indices assert indices is not None - assert indices.exists(revision.name_for_index("base")) + assert indices.exists( + revision.name_for_index(external_search_fixture.index_prefix) + ) def test_read_pointer_none(self, external_search_fixture: ExternalSearchFixture): """The read pointer is initially unset.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) - assert None == service.read_pointer() + service = external_search_fixture.service + assert service.read_pointer() is None def test_write_pointer_none(self, external_search_fixture: ExternalSearchFixture): """The write pointer is initially unset.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) - assert None == service.write_pointer() + service = external_search_fixture.service + assert service.write_pointer() is None def test_read_pointer_set(self, external_search_fixture: ExternalSearchFixture): """Setting the read pointer works.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service revision = BasicMutableRevision(23) service.index_create(revision) - # Log the index so that the 
fixture cleans it up afterward. - external_search_fixture.record_index("base-v23") - service.read_pointer_set(revision) - assert "base-v23" == service.read_pointer() + assert service.read_pointer() == f"{external_search_fixture.index_prefix}-v23" def test_read_pointer_set_empty( self, external_search_fixture: ExternalSearchFixture ): """Setting the read pointer to the empty index works.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service service.create_empty_index() - # Log the index so that the fixture cleans it up afterward. - external_search_fixture.record_index("base-empty") - service.read_pointer_set_empty() - assert "base-empty" == service.read_pointer() + assert service.read_pointer() == f"{external_search_fixture.index_prefix}-empty" def test_write_pointer_set(self, external_search_fixture: ExternalSearchFixture): """Setting the write pointer works.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service revision = BasicMutableRevision(23) service.index_create(revision) - # Log the index so that the fixture cleans it up afterward. - external_search_fixture.record_index("base-v23") - service.write_pointer_set(revision) pointer = service.write_pointer() assert pointer is not None - assert "base-v23" == pointer.target_name + assert pointer.target_name == f"{external_search_fixture.index_prefix}-v23" def test_populate_index_idempotent( self, external_search_fixture: ExternalSearchFixture ): """Populating an index is idempotent.""" - service = SearchServiceOpensearch1(external_search_fixture.client, BASE_NAME) + service = external_search_fixture.service revision = BasicMutableRevision(23) mappings = revision.mapping_document() @@ -145,14 +127,20 @@ def test_populate_index_idempotent( service.index_create(revision) - # Log the index so that the fixture cleans it up afterward. 
- external_search_fixture.record_index("base-v23") - service.index_submit_documents("base-v23", documents) - service.index_submit_documents("base-v23", documents) + service.index_submit_documents( + f"{external_search_fixture.index_prefix}-v23", documents + ) + service.index_submit_documents( + f"{external_search_fixture.index_prefix}-v23", documents + ) indices = external_search_fixture.client.indices.client.indices assert indices is not None - assert indices.exists(revision.name_for_index("base")) - assert indices.get(revision.name_for_index("base"))["base-v23"]["mappings"] == { + assert indices.exists( + revision.name_for_index(external_search_fixture.index_prefix) + ) + assert indices.get( + revision.name_for_index(external_search_fixture.index_prefix) + )[f"{external_search_fixture.index_prefix}-v23"]["mappings"] == { "properties": mappings.serialize_properties() } diff --git a/tests/manager/sqlalchemy/test_session.py b/tests/manager/sqlalchemy/test_session.py new file mode 100644 index 0000000000..0672879f57 --- /dev/null +++ b/tests/manager/sqlalchemy/test_session.py @@ -0,0 +1,77 @@ +from unittest.mock import patch + +from palace.manager.core.config import Configuration +from palace.manager.sqlalchemy.model.coverage import Timestamp +from palace.manager.sqlalchemy.session import ( + SessionManager, + json_serializer, + production_session, +) +from palace.manager.sqlalchemy.util import get_one +from tests.fixtures.database import DatabaseTransactionFixture + + +class TestSessionManager: + def test_initialize_data_does_not_reset_timestamp( + self, db: DatabaseTransactionFixture + ): + # initialize_data() has already been called, so the database is + # initialized and the 'site configuration changed' Timestamp has + # been set. Calling initialize_data() again won't change the + # date on the timestamp. 
+ timestamp = get_one( + db.session, + Timestamp, + collection=None, + service=Configuration.SITE_CONFIGURATION_CHANGED, + ) + assert timestamp is not None + old_timestamp = timestamp.finish + SessionManager.initialize_data(db.session) + assert old_timestamp == timestamp.finish + + @patch("palace.manager.sqlalchemy.session.create_engine") + @patch.object(Configuration, "database_url") + def test_engine(self, mock_database_url, mock_create_engine): + expected_args = { + "echo": False, + "json_serializer": json_serializer, + "pool_pre_ping": True, + "poolclass": None, + } + + # If a URL is passed in, it's used. + SessionManager.engine("url") + mock_database_url.assert_not_called() + mock_create_engine.assert_called_once_with("url", **expected_args) + mock_create_engine.reset_mock() + + # If no URL is passed in, the URL from the configuration is used. + SessionManager.engine() + mock_database_url.assert_called_once() + mock_create_engine.assert_called_once_with( + mock_database_url.return_value, **expected_args + ) + + @patch.object(SessionManager, "engine") + @patch.object(SessionManager, "session_from_connection") + def test_session(self, mock_session_from_connection, mock_engine): + session = SessionManager.session("test-url") + mock_engine.assert_called_once_with("test-url") + mock_engine.return_value.connect.assert_called_once() + mock_session_from_connection.assert_called_once_with( + mock_engine.return_value.connect.return_value + ) + assert session == mock_session_from_connection.return_value + + +@patch.object(SessionManager, "session") +@patch.object(Configuration, "database_url") +def test_production_session(mock_database_url, mock_session): + # Make sure production_session() calls session() with the URL from the + # configuration. 
+ mock_database_url.return_value = "test-url" + session = production_session() + mock_database_url.assert_called_once() + mock_session.assert_called_once_with("test-url", initialize_data=True) + assert session == mock_session.return_value diff --git a/tests/manager/sqlalchemy/test_util.py b/tests/manager/sqlalchemy/test_util.py index 483b200148..17f78bcc7b 100644 --- a/tests/manager/sqlalchemy/test_util.py +++ b/tests/manager/sqlalchemy/test_util.py @@ -3,10 +3,7 @@ from sqlalchemy import not_ from sqlalchemy.orm.exc import MultipleResultsFound -from palace.manager.core.config import Configuration -from palace.manager.sqlalchemy.model.coverage import Timestamp from palace.manager.sqlalchemy.model.edition import Edition -from palace.manager.sqlalchemy.session import SessionManager from palace.manager.sqlalchemy.util import ( get_one, numericrange_to_tuple, @@ -47,24 +44,6 @@ def test_get_one(self, db: DatabaseTransactionFixture): result = get_one(db.session, Edition, constraint=constraint) assert None == result - def test_initialize_data_does_not_reset_timestamp( - self, db: DatabaseTransactionFixture - ): - # initialize_data() has already been called, so the database is - # initialized and the 'site configuration changed' Timestamp has - # been set. Calling initialize_data() again won't change the - # date on the timestamp. 
- timestamp = get_one( - db.session, - Timestamp, - collection=None, - service=Configuration.SITE_CONFIGURATION_CHANGED, - ) - assert timestamp is not None - old_timestamp = timestamp.finish - SessionManager.initialize_data(db.session) - assert old_timestamp == timestamp.finish - class TestNumericRangeConversion: """Test the helper functions that convert between tuples and NumericRange diff --git a/tests/migration/conftest.py b/tests/migration/conftest.py index ec79f15463..11d9053e39 100644 --- a/tests/migration/conftest.py +++ b/tests/migration/conftest.py @@ -12,7 +12,7 @@ from pytest_alembic.config import Config from palace.manager.sqlalchemy.session import json_serializer -from tests.fixtures.database import ApplicationFixture, DatabaseFixture +from tests.fixtures.database import DatabaseFixture from tests.fixtures.services import ServicesFixture if TYPE_CHECKING: @@ -21,60 +21,41 @@ from sqlalchemy.engine import Connection, Engine -pytest_plugins = [ - "tests.fixtures.services", -] - - -@pytest.fixture(scope="function") -def application( - services_fixture: ServicesFixture, -) -> Generator[ApplicationFixture, None, None]: - app = ApplicationFixture.create() - yield app - app.close() - - -@pytest.fixture(scope="function") -def database(application: ApplicationFixture) -> Generator[DatabaseFixture, None, None]: - # This is very similar to the normal database fixture and uses the same object, - # but because these tests are done outside a transaction, we need this fixture - # to have function scope, so the database schema is completely reset between - # tests. - db = DatabaseFixture.create() - yield db - db.close() +@pytest.fixture +def alembic_config_path() -> Path: + return Path(__file__).parent.parent.parent.absolute() / "alembic.ini" @pytest.fixture -def alembic_config() -> Config: +def alembic_config(alembic_config_path: Path) -> Config: """ Use an explicit path to the alembic config file. 
This lets us run pytest from a different directory than the root of the project. """ - return Config( - config_options={ - "file": str(Path(__file__).parent.parent.parent.absolute() / "alembic.ini") - } - ) + return Config(config_options={"file": str(alembic_config_path)}) @pytest.fixture -def alembic_engine(database: DatabaseFixture) -> Engine: +def alembic_engine(function_database: DatabaseFixture) -> Engine: """ Override this fixture to provide pytest-alembic powered tests with a database handle. """ - return database._engine + return function_database.engine @pytest.fixture def alembic_runner( alembic_config: dict[str, Any] | alembic.config.Config | Config, alembic_engine: Engine, + services_fixture: ServicesFixture, ) -> Generator[MigrationContext, None, None]: """ Override this fixture to make sure that we stamp head. Since this is how out database is initialized. The normal fixtures assume you start from an empty database. + + This fixture also includes the services_fixture fixture which is used to mock out + the services container. This is done because some of the migrations require the services + container to be initialized. 
""" config = Config.from_raw_config(alembic_config) with pytest_alembic.runner(config=config, engine=alembic_engine) as runner: diff --git a/tests/migration/test_instance_init_script.py b/tests/migration/test_instance_init_script.py index 217df1e519..24a860a4eb 100644 --- a/tests/migration/test_instance_init_script.py +++ b/tests/migration/test_instance_init_script.py @@ -1,28 +1,79 @@ import logging +import multiprocessing import sys +from collections.abc import Generator +from contextlib import contextmanager from io import StringIO -from multiprocessing import Process +from pathlib import Path from unittest.mock import MagicMock, Mock +import pytest from pytest_alembic import MigrationContext from sqlalchemy import inspect from sqlalchemy.engine import Engine +from typing_extensions import Self from palace.manager.scripts import InstanceInitializationScript from palace.manager.sqlalchemy.session import SessionManager -from tests.fixtures.database import ApplicationFixture -from tests.fixtures.services import mock_services_container - - -def _run_script() -> None: +from tests.fixtures.database import DatabaseFixture +from tests.fixtures.services import ServicesFixture, mock_services_container + + +class InstanceInitScriptFixture: + def __init__( + self, + function_database: DatabaseFixture, + services_fixture: ServicesFixture, + alembic_config_path: Path, + ): + self.database = function_database + self.services = services_fixture + self.alembic_config_path = alembic_config_path + + def script(self) -> InstanceInitializationScript: + with self.database.patch_engine(): + return InstanceInitializationScript( + config_file=self.alembic_config_path, + ) + + @classmethod + @contextmanager + def fixture( + cls, + function_database: DatabaseFixture, + services_fixture: ServicesFixture, + alembic_config_path: Path, + ) -> Generator[Self, None, None]: + fixture = cls(function_database, services_fixture, alembic_config_path) + yield fixture + + +@pytest.fixture +def 
instance_init_script_fixture( + function_database: DatabaseFixture, + services_fixture: ServicesFixture, + alembic_config_path: Path, +) -> Generator[InstanceInitScriptFixture, None, None]: + with InstanceInitScriptFixture.fixture( + function_database, services_fixture, alembic_config_path + ) as fixture: + yield fixture + + +def _run_script(config_path: Path, db_url: str) -> None: try: # Capturing the log output stream = StringIO() logging.basicConfig(stream=stream, level=logging.INFO, force=True) + def engine_factory() -> Engine: + return SessionManager.engine(db_url) + mock_services = MagicMock() - with mock_services_container(mock_services): - script = InstanceInitializationScript() + with (mock_services_container(mock_services),): + script = InstanceInitializationScript( + config_file=config_path, engine_factory=engine_factory + ) script.run() # Set our exit code to the number of upgrades we ran @@ -34,17 +85,27 @@ def _run_script() -> None: sys.exit(-1) -def test_locking(alembic_runner: MigrationContext, alembic_engine: Engine) -> None: +def test_locking( + alembic_runner: MigrationContext, + alembic_config_path: Path, + instance_init_script_fixture: InstanceInitScriptFixture, +) -> None: # Migrate to the initial revision alembic_runner.migrate_down_to("base") + db_url = instance_init_script_fixture.database.database_name.url # Spawn three processes, that will all try to migrate to head # at the same time. One of them should do the migration, and # the other two should wait, then do no migration since it # has already been done. 
- p1 = Process(target=_run_script) - p2 = Process(target=_run_script) - p3 = Process(target=_run_script) + mp_ctx = multiprocessing.get_context("spawn") + process_kwargs = { + "config_path": alembic_config_path, + "db_url": db_url, + } + p1 = mp_ctx.Process(target=_run_script, kwargs=process_kwargs) + p2 = mp_ctx.Process(target=_run_script, kwargs=process_kwargs) + p3 = mp_ctx.Process(target=_run_script, kwargs=process_kwargs) p1.start() p2.start() @@ -67,17 +128,17 @@ def test_locking(alembic_runner: MigrationContext, alembic_engine: Engine) -> No assert exit_codes[2] == 0 -def test_initialize(application: ApplicationFixture) -> None: - # Run the script and make sure we create the alembic_version table - - application.drop_existing_schema() +def test_initialize(instance_init_script_fixture: InstanceInitScriptFixture) -> None: + # Drop any existing schema + instance_init_script_fixture.database.drop_existing_schema() - engine = SessionManager.engine() + # Run the script and make sure we create the alembic_version table + engine = instance_init_script_fixture.database.engine inspector = inspect(engine) assert "alembic_version" not in inspector.get_table_names() assert len(inspector.get_table_names()) == 0 - script = InstanceInitializationScript() + script = instance_init_script_fixture.script() script.initialize_database = Mock(wraps=script.initialize_database) script.migrate_database = Mock(wraps=script.migrate_database) script.run() @@ -97,7 +158,10 @@ def test_initialize(application: ApplicationFixture) -> None: assert script.migrate_database.call_count == 1 -def test_migrate(alembic_runner: MigrationContext) -> None: +def test_migrate( + alembic_runner: MigrationContext, + instance_init_script_fixture: InstanceInitScriptFixture, +) -> None: # Run the script and make sure we create the alembic_version table # Migrate to the initial revision alembic_runner.migrate_down_to("base") @@ -106,7 +170,7 @@ def test_migrate(alembic_runner: MigrationContext) -> None: 
assert alembic_runner.current == "base" assert alembic_runner.current != alembic_runner.heads[0] - script = InstanceInitializationScript() + script = instance_init_script_fixture.script() script.initialize_database = Mock(wraps=script.initialize_database) script.migrate_database = Mock(wraps=script.migrate_database) script.run() diff --git a/tests/mocks/search.py b/tests/mocks/search.py index 7e9f596f8f..0284287d9c 100644 --- a/tests/mocks/search.py +++ b/tests/mocks/search.py @@ -35,7 +35,6 @@ class SearchServiceFake(SearchService): _failing: SearchServiceFailureMode _search_client: Search _multi_search_client: MultiSearch - _indexes_created: list[str] _document_submission_attempts: list[dict] def __init__(self): @@ -46,7 +45,6 @@ def __init__(self): self._write_pointer: SearchWritePointer | None = None self._search_client = Search(using=MagicMock()) self._multi_search_client = MultiSearch(using=MagicMock()) - self._indexes_created = [] self._document_submission_attempts = [] @property @@ -57,9 +55,6 @@ def base_revision_name(self) -> str: def document_submission_attempts(self) -> list[dict]: return self._document_submission_attempts - def indexes_created(self) -> list[str]: - return self._indexes_created - def _fail_if_necessary(self): if self._failing == SearchServiceFailureMode.FAIL_ENTIRELY: raise OpenSearchException("Search index is on fire.") @@ -106,7 +101,6 @@ def write_pointer(self) -> SearchWritePointer | None: def create_empty_index(self) -> None: self._fail_if_necessary() - self._indexes_created.append(f"{self.base_name}-empty") return None def read_pointer_set(self, revision: SearchSchemaRevision) -> None: @@ -122,7 +116,6 @@ def read_pointer_set_empty(self) -> None: def index_create(self, revision: SearchSchemaRevision) -> None: self._fail_if_necessary() - self._indexes_created.append(revision.name_for_index(self.base_name)) return None def index_is_populated(self, revision: SearchSchemaRevision) -> bool: diff --git a/tox.ini b/tox.ini index 
70161a18ba..5879a8f1a9 100644 --- a/tox.ini +++ b/tox.ini @@ -7,14 +7,12 @@ commands_pre = poetry install --without ci --sync -v python -m textblob.download_corpora commands = - pytest {posargs:tests/manager} + pytest {posargs:tests} passenv = - SIMPLIFIED_* PALACE_* CI setenv = - COVERAGE_FILE = .coverage.{envname} - docker: SIMPLIFIED_TEST_DATABASE=postgresql://simplified_test:test@localhost:9005/simplified_circulation_test + docker: PALACE_TEST_DATABASE_URL=postgresql://palace:test@localhost:9005 docker: PALACE_TEST_SEARCH_URL=http://localhost:9007 docker: PALACE_TEST_MINIO_ENDPOINT_URL=http://localhost:9004 docker: PALACE_TEST_MINIO_USER=palace @@ -28,12 +26,6 @@ allowlist_externals = poetry pytest -[testenv:migration-docker] -commands = - pytest {posargs:tests/migration} -docker = - docker: db-circ - [testenv:report] skip_install = true commands = @@ -46,9 +38,8 @@ allowlist_externals = [docker:db-circ] image = postgres:12 environment = - POSTGRES_USER=simplified_test + POSTGRES_USER=palace POSTGRES_PASSWORD=test - POSTGRES_DB=simplified_circulation_test ports = 9005:5432/tcp healthcheck_cmd = pg_isready diff --git a/verbose-test b/verbose-test deleted file mode 100755 index 4edfbf651b..0000000000 --- a/verbose-test +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -TESTING=true PYTHONWARNINGS=ignore pytest -v tests