diff --git a/app/dao/api_key_dao.py b/app/dao/api_key_dao.py index 679467f4f4..f6a707bf76 100644 --- a/app/dao/api_key_dao.py +++ b/app/dao/api_key_dao.py @@ -68,6 +68,13 @@ def expire_api_key(service_id, api_key_id): db.session.add(api_key) +@transactional +def update_last_used_api_key(api_key_id, last_used=None) -> None: + api_key = ApiKey.query.filter_by(id=api_key_id).one() + api_key.last_used_timestamp = last_used if last_used else datetime.utcnow() + db.session.add(api_key) + + @transactional @version_class(ApiKey) def update_compromised_api_key_info(service_id, api_key_id, compromised_info): diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index bf886fbfd6..e67903428e 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -336,24 +336,35 @@ def get_total_notifications_sent_for_api_key(api_key_id): def get_last_send_for_api_key(api_key_id): """ + SELECT last_used_timestamp as last_notification_created + FROM api_keys + WHERE id = 'api_key_id'; + + If last_used_timestamp is null, then check notifications table/ or notification_history. 
SELECT max(created_at) as last_notification_created FROM notifications WHERE api_key_id = 'api_key_id' GROUP BY api_key_id; """ - notification_table = ( - db.session.query(func.max(Notification.created_at).label("last_notification_created")) - .filter(Notification.api_key_id == api_key_id) - .all() + # Fetch last_used_timestamp from api_keys table + api_key_table = ( + db.session.query(ApiKey.last_used_timestamp.label("last_notification_created")).filter(ApiKey.id == api_key_id).all() ) - if not notification_table[0][0]: + if not api_key_table[0][0]: notification_table = ( - db.session.query(func.max(NotificationHistory.created_at).label("last_notification_created")) - .filter(NotificationHistory.api_key_id == api_key_id) + db.session.query(func.max(Notification.created_at).label("last_notification_created")) + .filter(Notification.api_key_id == api_key_id) .all() ) - notification_table = [] if notification_table[0][0] is None else notification_table - return notification_table + if not notification_table[0][0]: + notification_table = ( + db.session.query(func.max(NotificationHistory.created_at).label("last_notification_created")) + .filter(NotificationHistory.api_key_id == api_key_id) + .all() + ) + notification_table = [] if notification_table[0][0] is None else notification_table + return notification_table + return api_key_table def get_api_key_ranked_by_notifications_created(n_days_back): diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index 56a2dc98d3..d3dc3304c8 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -15,6 +15,7 @@ from app.celery import provider_tasks from app.celery.letters_pdf_tasks import create_letters_pdf from app.config import QueueNames +from app.dao.api_key_dao import update_last_used_api_key from app.dao.notifications_dao import ( bulk_insert_notifications, dao_create_notification, @@ -133,6 +134,8 @@ def persist_notification( 
if redis_store.get(redis.daily_limit_cache_key(service.id)): redis_store.incr(redis.daily_limit_cache_key(service.id)) current_app.logger.info("{} {} created at {}".format(notification_type, notification_id, notification_created_at)) + if api_key_id: + update_last_used_api_key(api_key_id, notification_created_at) return notification @@ -298,6 +301,7 @@ def persist_notifications(notifications: List[VerifiedNotification]) -> List[Not """ lofnotifications = [] + api_key_last_used = None for notification in notifications: notification_created_at = notification.get("created_at") or datetime.utcnow() @@ -357,7 +361,15 @@ def persist_notifications(notifications: List[VerifiedNotification]) -> List[Not notification.get("notification_created_at"), # type: ignore ) ) + # If the bulk message is sent using an api key, we want to keep track of the last time the api key was used + # We will only update the api key once + api_key_id = notification.get("api_key_id") + if api_key_id: + api_key_last_used = datetime.utcnow() + if api_key_last_used: + update_last_used_api_key(api_key_id, api_key_last_used) bulk_insert_notifications(lofnotifications) + return lofnotifications diff --git a/scripts/enlarge_db/README.md b/scripts/enlarge_db/README.md new file mode 100644 index 0000000000..b36090390b --- /dev/null +++ b/scripts/enlarge_db/README.md @@ -0,0 +1,22 @@ +# Enlarge DB + +## Purpose + +The purpose of this script is to add rows to the notification_history table. This is useful in estimating how long database-related infrastructure operations will take when performed on a database the same size as that in production. +## How to use + +The script should be run in the same environment as api. Locally this can be in the api repo devcontainer, while in AWS the api kubernetes pod would be preferred. 
+ +To add 2000 rows to the table with a client_reference of "test2000" run + +``` +cd scripts/enlarge_db +python enlarge_db.py -n 2000 -r test2000 +``` + +The new notifications are added in batches to improve performance, with a default batch size of 10000. You may use a different batch size with the `-c` parameter, for example + +``` +python enlarge_db.py -n 2000 -c 101 -r test2000x101 +``` diff --git a/scripts/enlarge_db/enlarge_db.py b/scripts/enlarge_db/enlarge_db.py new file mode 100644 index 0000000000..649e6f3032 --- /dev/null +++ b/scripts/enlarge_db/enlarge_db.py @@ -0,0 +1,55 @@ + +import argparse +import sys +from datetime import datetime +from typing import List + +from flask import Flask + +sys.path.append("../..") +from app import create_app, create_uuid, db  # noqa: E402 +from app.config import Config  # noqa: E402 +from app.models import NotificationHistory  # noqa: E402 + +DEFAULT_CHUNK_SIZE = 10000 + + +def create_notifications(n: int, ref: str) -> List[NotificationHistory]: + notifications = [ + NotificationHistory( + id=create_uuid(), + created_at=datetime.utcnow(), + template_id=Config.NEW_USER_EMAIL_VERIFICATION_TEMPLATE_ID, + template_version=1, + service_id=Config.NOTIFY_SERVICE_ID, + notification_type="email", + key_type='normal', + client_reference=ref, + ) + for _ in range(n) + ] + return notifications + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("-n", "--notifications", default=1, type=int, help="number of notifications to add to the notification_history table (default 1)") + parser.add_argument("-r", "--reference", default="manually created", type=str, help="client reference to use for the notifications (default 'manually created')") + parser.add_argument("-c", "--chunksize", default=DEFAULT_CHUNK_SIZE, type=int, help=f"chunk size for bulk_save_objects (default {DEFAULT_CHUNK_SIZE})") + args = parser.parse_args() + + app = Flask("enlarge_db") + create_app(app) + + for notifications_done in range(0, 
args.notifications, args.chunksize): + notifications = create_notifications(min(args.chunksize, args.notifications - notifications_done), args.reference) + print(f"Adding {len(notifications)} notifications to notification_history") + with app.app_context(): + try: + db.session.bulk_save_objects(notifications) + db.session.commit() + except Exception as e: + print(f"Error adding notifications: {e}") + db.session.rollback() + sys.exit(1) + print(f"Done {notifications_done+len(notifications)} / {args.notifications}") diff --git a/tests/app/dao/test_api_key_dao.py b/tests/app/dao/test_api_key_dao.py index 7642325ded..5e23002171 100644 --- a/tests/app/dao/test_api_key_dao.py +++ b/tests/app/dao/test_api_key_dao.py @@ -14,6 +14,7 @@ resign_api_keys, save_model_api_key, update_compromised_api_key_info, + update_last_used_api_key, ) from app.models import KEY_TYPE_NORMAL, ApiKey from tests.app.db import create_api_key @@ -61,6 +62,17 @@ def test_expire_api_key_should_update_the_api_key_and_create_history_record(noti sorted_all_history[1].version = 2 +def test_last_used_should_update_the_api_key_and_not_create_history_record(notify_api, sample_api_key): + last_used = datetime.utcnow() + update_last_used_api_key(api_key_id=sample_api_key.id, last_used=last_used) + all_api_keys = get_model_api_keys(service_id=sample_api_key.service_id) + assert len(all_api_keys) == 1 + assert all_api_keys[0].last_used_timestamp == last_used + + all_history = sample_api_key.get_history_model().query.all() + assert len(all_history) == 1 + + def test_update_compromised_api_key_info_and_create_history_record(notify_api, sample_api_key): update_compromised_api_key_info( service_id=sample_api_key.service_id, api_key_id=sample_api_key.id, compromised_info={"key": "value"} diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 6e6087d2cc..663cf859f8 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ 
b/tests/app/dao/test_fact_notification_status_dao.py @@ -356,6 +356,14 @@ def test_get_total_notifications_sent_for_api_key(notify_db_session): assert dict(api_key_stats_3) == dict([(EMAIL_TYPE, total_sends), (SMS_TYPE, total_sends)]) +def test_get_last_send_for_api_key_check_last_used(notify_db_session): + service = create_service(service_name="First Service") + api_key = create_api_key(service, last_used=datetime.utcnow()) + + last_send = get_last_send_for_api_key(str(api_key.id))[0][0] + assert last_send == api_key.last_used_timestamp + + def test_get_last_send_for_api_key(notify_db_session): service = create_service(service_name="First Service") api_key = create_api_key(service) diff --git a/tests/app/db.py b/tests/app/db.py index 9e79f5042c..1dacec37bc 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -551,7 +551,7 @@ def create_letter_rate( return rate -def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None): +def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None, last_used=None): id_ = uuid.uuid4() name = key_name if key_name else "{} api key {}".format(key_type, id_) @@ -563,6 +563,7 @@ def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None): key_type=key_type, id=id_, secret=uuid.uuid4(), + last_used_timestamp=last_used, ) db.session.add(api_key) db.session.commit() diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index bb194d3c28..eaa0c00da0 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -19,6 +19,7 @@ LETTER_TYPE, NORMAL, PRIORITY, + ApiKey, Notification, NotificationHistory, ScheduledNotification, @@ -433,6 +434,10 @@ def test_persist_notifications_list(self, sample_job, sample_api_key, notify_db_ assert persisted_notification[1].to == "foo2@bar.com" assert persisted_notification[0].service == sample_job.service + # Test that the api key last_used_timestamp 
got updated + api_key = ApiKey.query.get(sample_api_key.id) + assert api_key.last_used_timestamp is not None + def test_persist_notifications_reply_to_text_is_original_value_if_sender_is_changed_later( self, sample_template, sample_api_key, mocker ): @@ -913,6 +918,8 @@ def test_transform_email_notification_stores_normalised_email( assert persisted_notification.to == recipient assert persisted_notification.normalised_to == expected_recipient_normalised + api_key = ApiKey.query.get(sample_api_key.id) + assert api_key.last_used_timestamp is not None class TestDBSaveAndSendNotification: