Skip to content

Commit

Permalink
Add a redis service
Browse files Browse the repository at this point in the history
  • Loading branch information
jonathangreen committed Jun 12, 2024
1 parent d0c6e21 commit 54953c4
Show file tree
Hide file tree
Showing 26 changed files with 896 additions and 8 deletions.
15 changes: 14 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ grant all privileges on database circ to palace;

### Redis

Redis is used as the broker for Celery. You can run Redis with docker using the following command:
Redis is used as the broker for Celery and the caching layer. You can run Redis with docker using the following command:

```sh
docker run -d --name redis -p 6379:6379 redis
Expand Down Expand Up @@ -193,6 +193,19 @@ We support overriding a number of other Celery settings via environment variable
the defaults should be sufficient. The full list of settings can be found in
[`service/celery/configuration.py`](src/palace/manager/service/celery/configuration.py).

#### Redis

We use Redis as the caching layer for the application. Although you can use the same Redis database for both
Celery and caching, we recommend using a separate database for each purpose to avoid key conflicts.

- `PALACE_REDIS_URL`: The URL of the Redis instance to use for caching. (**required**).
- for example:
```sh
export PALACE_REDIS_URL="redis://localhost:6379/1"
```
- `PALACE_REDIS_KEY_PREFIX`: The prefix to use for keys stored in the Redis instance. The default is `palace`.
  This is useful if you want to use the same Redis database for multiple Circulation Manager (CM) instances (optional).

#### General

- `PALACE_BASE_URL`: The base URL of the application. Used to create absolute links. (optional)
Expand Down
2 changes: 2 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,15 @@ x-cm-variables: &cm
PALACE_CELERY_BROKER_URL: "redis://redis:6379/0"
PALACE_CELERY_BROKER_TRANSPORT_OPTIONS_GLOBAL_KEYPREFIX: "test"
PALACE_CELERY_CLOUDWATCH_STATISTICS_DRYRUN: "true"
PALACE_REDIS_URL: "redis://redis:6379/1"

# Set up the environment variables used for testing as well
PALACE_TEST_DATABASE_URL: "postgresql://palace:test@pg:5432/circ"
PALACE_TEST_SEARCH_URL: "http://os:9200"
PALACE_TEST_MINIO_URL: "http://minio:9000"
PALACE_TEST_MINIO_USER: "palace"
PALACE_TEST_MINIO_PASSWORD: "test123456789"
PALACE_TEST_REDIS_URL: "redis://redis:6379/2"
depends_on:
pg:
condition: service_healthy
Expand Down
27 changes: 20 additions & 7 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -263,6 +263,7 @@ python-dateutil = "2.9.0.post0"
python3-saml = "^1.16" # python-saml is required for SAML authentication
pytz = "^2023.3"
pyyaml = "^6.0"
redis = "^5.0.5"
redmail = "^0.6.0"
requests = "^2.29"
sqlalchemy = {version = "^1.4", extras = ["mypy"]}
Expand Down Expand Up @@ -308,6 +309,7 @@ types-Pillow = "^10.0.0"
types-psycopg2 = "^2.9.21"
types-python-dateutil = "^2.8.19"
types-pytz = "^2024.1"
types-redis = "^4.6.0.20240425"
types-requests = "^2.28.11"

[tool.poetry.group.pg]
Expand Down
183 changes: 183 additions & 0 deletions src/palace/manager/celery/tasks/patron_activity.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,183 @@
# NOTE(review): this entire file is commented-out legacy `sync_bookshelf` code,
# presumably kept as a reference for the upcoming Celery patron-activity task —
# confirm it is still needed, or delete it rather than shipping dead code.
# def sync_bookshelf(
# self, patron: Patron, pin: str | None, force: bool = False
# ) -> tuple[list[Loan] | Query[Loan], list[Hold] | Query[Hold]]:
# """Sync our internal model of a patron's bookshelf with any external
# vendors that provide books to the patron's library.
#
# :param patron: A Patron.
# :param pin: The password authenticating the patron; used by some vendors
# that perform a cross-check against the library ILS.
# :param force: If this is True, the method will call out to external
# vendors even if it looks like the system has up-to-date information
# about the patron.
# """
# # Get our internal view of the patron's current state.
# local_loans = self.local_loans(patron)
# local_holds = self.local_holds(patron)
#
# if patron and patron.last_loan_activity_sync and not force:
# # Our local data is considered fresh, so we can return it
# # without calling out to the vendor APIs.
# return local_loans, local_holds
#
# # Assuming everything goes well, we will set
# # Patron.last_loan_activity_sync to this value -- the moment
# # just before we started contacting the vendor APIs.
# last_loan_activity_sync: datetime.datetime | None = utc_now()
#
# # Update the external view of the patron's current state.
# remote_loans, remote_holds, complete = self.patron_activity(patron, pin)
# __transaction = self._db.begin_nested()
#
# if not complete:
# # We were not able to get a complete picture of the
# # patron's loan activity. Until we are able to do that, we
# # should never assume that our internal model of the
# # patron's loans is good enough to cache.
# last_loan_activity_sync = None
#
# now = utc_now()
# local_loans_by_identifier = {}
# local_holds_by_identifier = {}
# for l in local_loans:
# if not l.license_pool:
# self.log.error("Active loan with no license pool!")
# continue
# i = l.license_pool.identifier
# if not i:
# self.log.error(
# "Active loan on license pool %s, which has no identifier!",
# l.license_pool,
# )
# continue
# key = (i.type, i.identifier)
# local_loans_by_identifier[key] = l
# for h in local_holds:
# if not h.license_pool:
# self.log.error("Active hold with no license pool!")
# continue
# i = h.license_pool.identifier
# if not i:
# self.log.error(
# "Active hold on license pool %r, which has no identifier!",
# h.license_pool,
# )
# continue
# key = (i.type, i.identifier)
# local_holds_by_identifier[key] = h
#
# active_loans = []
# active_holds = []
# start: datetime.datetime | None
# end: datetime.datetime | None
# for loan in remote_loans:
# # This is a remote loan. Find or create the corresponding
# # local loan.
# pool = loan.license_pool(self._db)
# start = loan.start_date
# end = loan.end_date
# key = (loan.identifier_type, loan.identifier)
# if key in local_loans_by_identifier:
# # We already have the Loan object, we don't need to look
# # it up again.
# local_loan = local_loans_by_identifier[key]
#
# # But maybe the remote's opinions as to the loan's
# # start or end date have changed.
# if start:
# local_loan.start = start
# if end:
# local_loan.end = end
# else:
# local_loan, new = pool.loan_to(patron, start, end)
#
# if loan.locked_to:
# # The loan source is letting us know that the loan is
# # locked to a specific delivery mechanism. Even if
# # this is the first we've heard of this loan,
# # it may have been created in another app or through
# # a library-website integration.
# loan.locked_to.apply(local_loan, autocommit=False)
# active_loans.append(local_loan)
#
# # Check the local loan off the list we're keeping so we
# # don't delete it later.
# key = (loan.identifier_type, loan.identifier)
# if key in local_loans_by_identifier:
# del local_loans_by_identifier[key]
#
# for hold in remote_holds:
# # This is a remote hold. Find or create the corresponding
# # local hold.
# pool = hold.license_pool(self._db)
# start = hold.start_date
# end = hold.end_date
# position = hold.hold_position
# key = (hold.identifier_type, hold.identifier)
# if key in local_holds_by_identifier:
# # We already have the Hold object, we don't need to look
# # it up again.
# local_hold = local_holds_by_identifier[key]
#
# # But maybe the remote's opinions as to the hold's
# # start or end date have changed.
# local_hold.update(start, end, position)
# else:
# local_hold, new = pool.on_hold_to(patron, start, end, position)
# active_holds.append(local_hold)
#
# # Check the local hold off the list we're keeping so that
# # we don't delete it later.
# if key in local_holds_by_identifier:
# del local_holds_by_identifier[key]
#
# # We only want to delete local loans and holds if we were able to
# # successfully sync with all the providers. If there was an error,
# # the provider might still know about a loan or hold that we don't
# # have in the remote lists.
# if complete:
# # Every loan remaining in loans_by_identifier is a hold that
# # the provider doesn't know about. This usually means it's expired
# # and we should get rid of it, but it's possible the patron is
# # borrowing a book and syncing their bookshelf at the same time,
# # and the local loan was created after we got the remote loans.
# # If the loan's start date is less than a minute ago, we'll keep it.
# for local_loan in list(local_loans_by_identifier.values()):
# if (
# local_loan.license_pool.collection_id
# in self.collection_ids_for_sync
# ):
# one_minute_ago = utc_now() - datetime.timedelta(minutes=1)
# if local_loan.start is None or local_loan.start < one_minute_ago:
# logging.info(
# "In sync_bookshelf for patron %s, deleting loan %s (patron %s)"
# % (
# patron.authorization_identifier,
# str(local_loan.id),
# local_loan.patron.authorization_identifier,
# )
# )
# self._db.delete(local_loan)
# else:
# logging.info(
# "In sync_bookshelf for patron %s, found local loan %s created in the past minute that wasn't in remote loans"
# % (patron.authorization_identifier, str(local_loan.id))
# )
#
# # Every hold remaining in holds_by_identifier is a hold that
# # the provider doesn't know about, which means it's expired
# # and we should get rid of it.
# for local_hold in list(local_holds_by_identifier.values()):
# if (
# local_hold.license_pool.collection_id
# in self.collection_ids_for_sync
# ):
# self._db.delete(local_hold)
#
# # Now that we're in sync (or not), set last_loan_activity_sync
# # to the conservative value obtained earlier.
# if patron:
# patron.last_loan_activity_sync = last_loan_activity_sync
#
# __transaction.commit()
# return active_loans, active_holds
8 changes: 8 additions & 0 deletions src/palace/manager/service/container.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
)
from palace.manager.service.logging.configuration import LoggingConfiguration
from palace.manager.service.logging.container import Logging
from palace.manager.service.redis.configuration import RedisConfiguration
from palace.manager.service.redis.container import RedisContainer
from palace.manager.service.search.configuration import SearchConfiguration
from palace.manager.service.search.container import Search
from palace.manager.service.sitewide import SitewideConfiguration
Expand Down Expand Up @@ -65,6 +67,11 @@ class Services(DeclarativeContainer):
IntegrationRegistryContainer,
)

redis = Container(
RedisContainer,
config=config.redis,
)


def wire_container(container: Services) -> None:
container.wire(
Expand Down Expand Up @@ -97,6 +104,7 @@ def create_container() -> Services:
"email": EmailConfiguration().dict(),
"celery": CeleryConfiguration().dict(),
"fcm": FcmConfiguration().dict(),
"redis": RedisConfiguration().dict(),
}
)
wire_container(container)
Expand Down
Empty file.
11 changes: 11 additions & 0 deletions src/palace/manager/service/redis/configuration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from pydantic import RedisDsn

from palace.manager.service.configuration import ServiceConfiguration


class RedisConfiguration(ServiceConfiguration):
    """Settings for the Redis caching service.

    Values are loaded from environment variables carrying the
    ``PALACE_REDIS_`` prefix (e.g. ``PALACE_REDIS_URL``).
    """

    # Connection URL of the Redis instance used for caching (required).
    url: RedisDsn
    # Prefix applied to every key stored in Redis, so that multiple
    # deployments can safely share a single Redis database.
    key_prefix: str = "palace"

    class Config:
        # Pydantic settings: map env vars such as PALACE_REDIS_URL /
        # PALACE_REDIS_KEY_PREFIX onto the fields above.
        env_prefix = "PALACE_REDIS_"
22 changes: 22 additions & 0 deletions src/palace/manager/service/redis/container.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import redis
from dependency_injector import providers
from dependency_injector.containers import DeclarativeContainer

from palace.manager.service.redis.key import RedisKeyGenerator
from palace.manager.service.redis.redis import Redis


class RedisContainer(DeclarativeContainer):
    """Dependency-injection container that wires up the Redis service.

    Exposes a shared connection pool, a key generator that applies the
    configured key prefix, and the project's ``Redis`` client built from
    both. All providers are singletons, so every consumer shares the same
    pool and client instances.
    """

    # Populated from RedisConfiguration (expects ``url`` and ``key_prefix``).
    config = providers.Configuration()

    # Single shared connection pool built from the configured URL.
    # decode_responses=True makes the client return ``str`` instead of ``bytes``.
    connection_pool: providers.Provider[redis.ConnectionPool] = providers.Singleton(
        redis.ConnectionPool.from_url, url=config.url, decode_responses=True
    )

    # Builds namespaced Redis keys using the configured prefix.
    key_generator: providers.Provider[RedisKeyGenerator] = providers.Singleton(
        RedisKeyGenerator, prefix=config.key_prefix
    )

    # Project Redis client wrapper combining the pool and key generator.
    client: providers.Provider[Redis] = providers.Singleton(
        Redis, connection_pool=connection_pool, key_generator=key_generator
    )
9 changes: 9 additions & 0 deletions src/palace/manager/service/redis/exception.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from palace.manager.core.exceptions import BasePalaceException


class RedisKeyError(BasePalaceException, TypeError):
    """Palace exception raised as a ``TypeError`` for Redis key problems."""


class RedisValueError(BasePalaceException, ValueError):
    """Palace exception raised as a ``ValueError`` for Redis value problems."""
Loading

0 comments on commit 54953c4

Please sign in to comment.