Skip to content

Commit

Permalink
Tests updated
Browse files Browse the repository at this point in the history
  • Loading branch information
jonathangreen committed May 15, 2024
1 parent 9ae9f79 commit 43a77bc
Show file tree
Hide file tree
Showing 10 changed files with 77 additions and 460 deletions.
4 changes: 2 additions & 2 deletions src/palace/manager/api/admin/controller/custom_lists.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,9 +291,9 @@ def _create_or_update_list(
# so the upstream counts can be calculated correctly.
documents = Work.to_search_documents(
self._db,
[w.id for w in works_to_update_in_search],
[w.id for w in works_to_update_in_search if w.id is not None],
)
# TODO: Does this need to be done here, or can this be done in the task?
# TODO: Does this need to be done here, or can this be done asynchronously?
self.search_engine.add_documents(documents)
self.search_engine.search_service().refresh()

Expand Down
13 changes: 8 additions & 5 deletions src/palace/manager/scripts/initialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

from palace.manager.celery.tasks.search import do_migration
from palace.manager.search.revision import SearchSchemaRevision
from palace.manager.search.service import SearchService
from palace.manager.search.service import SearchService, SearchWritePointer
from palace.manager.service.container import container_instance
from palace.manager.sqlalchemy.session import SessionManager
from palace.manager.sqlalchemy.util import LOCK_ID_DB_INIT, pg_advisory_lock
Expand Down Expand Up @@ -112,9 +112,11 @@ def create_search_index(

@classmethod
def migrate_search(
cls, service: SearchService, revision: SearchSchemaRevision
cls,
service: SearchService,
revision: SearchSchemaRevision,
write_pointer: SearchWritePointer,
) -> None:
write_pointer = service.write_pointer()
if write_pointer.version != revision.version:
# The revision is not the most recent. We need to create a new index.
# and start reindexing our data into it asynchronously. When the reindex
Expand All @@ -137,15 +139,16 @@ def initialize_search(self) -> None:
service = self._container.search.service()
revision_directory = self._container.search.revision_directory()
revision = revision_directory.highest()
write_pointer = service.write_pointer()

if not service.write_pointer():
if write_pointer is None:
# A write pointer does not exist. This is a fresh index.
self.log.info("Search index does not exist. Creating a new index.")
self.create_search_index(service, revision)
else:
# The index already exists. We need to check if the revision is the most recent.
self.log.info("Search index exists. Checking for search migrations.")
self.migrate_search(service, revision)
self.migrate_search(service, revision, write_pointer)
self.log.info("Search initialization complete.")

def run(self) -> None:
Expand Down
5 changes: 4 additions & 1 deletion src/palace/manager/sqlalchemy/model/work.py
Original file line number Diff line number Diff line change
Expand Up @@ -1741,7 +1741,10 @@ def target_age_query(self, foreign_work_id_field):
def to_search_document(self) -> dict[str, Any]:
    """Generate a search document for this Work.

    Returns an empty dict when the Work has not yet been flushed to the
    database (``self.id`` is ``None``), since ``to_search_documents``
    needs a concrete database id to build the document.
    """
    db = Session.object_session(self)
    if self.id is not None:
        return Work.to_search_documents(db, [self.id])[0]
    else:
        # No id yet -- nothing can be indexed for this Work.
        return {}

Check warning on line 1747 in src/palace/manager/sqlalchemy/model/work.py

View check run for this annotation

Codecov / codecov/patch

src/palace/manager/sqlalchemy/model/work.py#L1747

Added line #L1747 was not covered by tests

def mark_licensepools_as_superceded(self):
"""Make sure that all but the single best open-access LicensePool for
Expand Down
4 changes: 2 additions & 2 deletions tests/fixtures/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -319,7 +319,7 @@ def clear(self):
def disable_fixture(self):
self.patch.stop()

def is_queued(self, work: int | Work, clear: bool = False) -> bool:
def is_queued(self, work: int | Work, *, clear: bool = False) -> bool:
if isinstance(work, Work):
work_id = work.id
else:
Expand All @@ -343,6 +343,6 @@ def fixture(cls):


@pytest.fixture(scope="function")
def work_external_indexing() -> Generator[WorkExternalIndexingFixture, None, None]:
    """Yield a WorkExternalIndexingFixture, tearing it down after the test.

    The context manager handles setup/teardown; the Generator return
    annotation reflects that this fixture yields rather than returns.
    """
    with WorkExternalIndexingFixture.fixture() as fixture:
        yield fixture
27 changes: 23 additions & 4 deletions tests/manager/scripts/test_initialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,19 +124,38 @@ def test__get_alembic_config(self, db: DatabaseTransactionFixture):
)
assert conf.config_file_name == str(test_ini.resolve())

def test_initialize_search(self, external_search_fixture: ExternalSearchFixture):
    """End-to-end check of InstanceInitializationScript.initialize_search:
    a fresh service creates an index; a second run migrates instead."""
    service = external_search_fixture.service
    base_name = service.base_revision_name

    script = InstanceInitializationScript()
    # Wrap (not replace) the real methods so we can assert on calls
    # while still exercising the real behavior.
    script.create_search_index = MagicMock(wraps=script.create_search_index)
    script.migrate_search = MagicMock(wraps=script.migrate_search)

    # Initially this should not exist, if the search service hasn't been initialized
    assert service.read_pointer() is None
    assert service.write_pointer() is None

    # Do the initial search index creation
    script.initialize_search()

    # We should have created the search index, and not tried to migrate it,
    # since we know it's a new index.
    script.create_search_index.assert_called_once()
    script.migrate_search.assert_not_called()

    # Then we have the latest version index
    read_pointer = service.read_pointer()
    assert read_pointer is not None
    assert base_name in read_pointer

    write_pointer = service.write_pointer()
    assert write_pointer is not None
    assert base_name in write_pointer.name

    # Now we try to initialize the search index again, and we should not create a new index
    script.create_search_index.reset_mock()
    script.initialize_search()

    script.create_search_index.assert_not_called()
    script.migrate_search.assert_called_once()
64 changes: 15 additions & 49 deletions tests/manager/scripts/test_search.py
Original file line number Diff line number Diff line change
@@ -1,57 +1,23 @@
from __future__ import annotations

import random
from unittest.mock import patch

from palace.manager.scripts.search import RebuildSearchIndexScript
from palace.manager.sqlalchemy.model.coverage import WorkCoverageRecord
from tests.fixtures.database import DatabaseTransactionFixture
from tests.fixtures.search import ExternalSearchFixtureFake


class TestRebuildSearchIndexScript:
    """Tests for RebuildSearchIndexScript's dispatch to the reindex task."""

    def test_do_run(self, db: DatabaseTransactionFixture):
        # If we are called with no arguments, we default to asynchronously
        # rebuilding the search index (the Celery task is queued via .delay()).
        with patch(
            "palace.manager.scripts.search.search_reindex"
        ) as mock_search_reindex:
            RebuildSearchIndexScript(db.session).do_run()
            mock_search_reindex.delay.assert_called_once_with()

        # If we are called with the --blocking argument, we rebuild the
        # search index synchronously (the task function is called directly).
        with patch(
            "palace.manager.scripts.search.search_reindex"
        ) as mock_search_reindex:
            RebuildSearchIndexScript(db.session, cmd_args=["--blocking"]).do_run()
            mock_search_reindex.assert_called_once_with()
111 changes: 0 additions & 111 deletions tests/manager/search/test_migration_states.py

This file was deleted.

Loading

0 comments on commit 43a77bc

Please sign in to comment.