From ad21ce1b741537f6cb12e33599596ba69bb3f2b6 Mon Sep 17 00:00:00 2001 From: William B <7444334+whabanks@users.noreply.github.com> Date: Wed, 13 Nov 2024 16:35:34 -0400 Subject: [PATCH 01/10] Clear Redis notification counts during nightly fact job (#2344) * Clear Redis notification counts during nightly fact job * Add seeding logic to receipt tasks - Formatting * Refactor seeding logic * bump utils to 52.3.7 * Squash duplicate counting bug - Fix tests - Monthly stats no longer returns stats for the current day - Misc. cleanups comments * Add feature flag checks * Fixes and formatting --- .github/workflows/test.yaml | 2 +- app/celery/process_pinpoint_receipts_tasks.py | 40 +++- app/celery/process_ses_receipts_tasks.py | 29 ++- app/celery/process_sns_receipts_tasks.py | 28 ++- app/celery/reporting_tasks.py | 6 +- app/dao/fact_notification_status_dao.py | 20 +- app/service/rest.py | 8 +- app/utils.py | 35 ++++ poetry.lock | 8 +- pyproject.toml | 2 +- .../test_process_pinpoint_receipts_tasks.py | 181 +++++++++++------- .../celery/test_process_ses_receipts_tasks.py | 38 ++++ .../celery/test_process_sns_receipts_tasks.py | 157 +++++++++------ tests/app/celery/test_reporting_tasks.py | 23 +++ tests/app/service/test_statistics_rest.py | 58 +++--- 15 files changed, 458 insertions(+), 177 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 037f12dbb5..bdffe264a4 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -69,7 +69,7 @@ jobs: run: | cp -f .env.example .env - name: Checks for new endpoints against AWS WAF rules - uses: cds-snc/notification-utils/.github/actions/waffles@52.3.6 + uses: cds-snc/notification-utils/.github/actions/waffles@52.3.7 with: app-loc: '/github/workspace' app-libs: '/github/workspace/env/site-packages' diff --git a/app/celery/process_pinpoint_receipts_tasks.py b/app/celery/process_pinpoint_receipts_tasks.py index d1732c9b38..5edd796169 100644 --- a/app/celery/process_pinpoint_receipts_tasks.py +++ b/app/celery/process_pinpoint_receipts_tasks.py @@ -3,11 +3,15 @@ from flask import current_app, json from notifications_utils.statsd_decorators import statsd +from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy.orm.exc import NoResultFound from app import annual_limit_client, notify_celery, statsd_client from app.config import QueueNames from app.dao import notifications_dao +from app.dao.fact_notification_status_dao import ( + fetch_notification_status_for_service_for_day, +) from app.models import ( NOTIFICATION_DELIVERED, NOTIFICATION_PERMANENT_FAILURE, @@ -17,6 +21,7 @@ PINPOINT_PROVIDER, ) from app.notifications.callbacks import _check_and_queue_callback_task +from app.utils import prepare_notification_counts_for_seeding from celery.exceptions import Retry # Pinpoint receipts are of the form: @@ -106,6 +111,22 @@ def process_pinpoint_results(self, response): sms_origination_phone_number=origination_phone_number, ) + service_id = notification.service_id + # Flags if seeding has occurred. Since we seed after updating the notification status in the DB then the current notification + # is included in the fetch_notification_status_for_service_for_day call below, thus we don't need to increment the count. 
+ notifications_to_seed = None + + if current_app.config["FF_ANNUAL_LIMIT"]: + if not annual_limit_client.was_seeded_today(service_id): + annual_limit_client.set_seeded_at(service_id) + notifications_to_seed = fetch_notification_status_for_service_for_day( + convert_utc_to_local_timezone(datetime.utcnow()), + service_id=service_id, + ) + annual_limit_client.seed_annual_limit_notifications( + service_id, prepare_notification_counts_for_seeding(notifications_to_seed) + ) + if notification_status != NOTIFICATION_DELIVERED: current_app.logger.info( ( @@ -115,25 +136,34 @@ def process_pinpoint_results(self, response): ) # TODO FF_ANNUAL_LIMIT removal if current_app.config["FF_ANNUAL_LIMIT"]: - annual_limit_client.increment_sms_failed(notification.service_id) + # Only increment if we didn't just seed. + if notifications_to_seed is None: + annual_limit_client.increment_sms_failed(service_id) current_app.logger.info( - f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" + f"Incremented sms_delivered count in Redis. Service: {service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(service_id)}" ) else: current_app.logger.info( f"Pinpoint callback return status of {notification_status} for notification: {notification.id}" ) + # TODO FF_ANNUAL_LIMIT removal if current_app.config["FF_ANNUAL_LIMIT"]: - annual_limit_client.increment_sms_delivered(notification.service_id) + # Only increment if we didn't just seed. + if notifications_to_seed is None: + annual_limit_client.increment_sms_delivered(service_id) current_app.logger.info( - f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" + f"Incremented sms_delivered count in Redis. 
Service: {service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(service_id)}" ) statsd_client.incr(f"callback.pinpoint.{notification_status}") if notification.sent_at: - statsd_client.timing_with_dates("callback.pinpoint.elapsed-time", datetime.utcnow(), notification.sent_at) + statsd_client.timing_with_dates( + "callback.pinpoint.elapsed-time", + datetime.utcnow(), + notification.sent_at, + ) _check_and_queue_callback_task(notification) diff --git a/app/celery/process_ses_receipts_tasks.py b/app/celery/process_ses_receipts_tasks.py index 86811bb720..5d2c29b133 100644 --- a/app/celery/process_ses_receipts_tasks.py +++ b/app/celery/process_ses_receipts_tasks.py @@ -2,11 +2,15 @@ from flask import current_app, json from notifications_utils.statsd_decorators import statsd +from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy.orm.exc import NoResultFound from app import annual_limit_client, bounce_rate_client, notify_celery, statsd_client from app.config import QueueNames from app.dao import notifications_dao +from app.dao.fact_notification_status_dao import ( + fetch_notification_status_for_service_for_day, +) from app.models import NOTIFICATION_DELIVERED, NOTIFICATION_PERMANENT_FAILURE from app.notifications.callbacks import _check_and_queue_callback_task from app.notifications.notifications_ses_callback import ( @@ -14,6 +18,7 @@ get_aws_responses, handle_complaint, ) +from app.utils import prepare_notification_counts_for_seeding from celery.exceptions import Retry @@ -83,6 +88,22 @@ def process_ses_results(self, response): # noqa: C901 bounce_response=aws_response_dict.get("bounce_response", None), ) + service_id = notification.service_id + # Flags if seeding has occurred. Since we seed after updating the notification status in the DB then the current notification + # is included in the fetch_notification_status_for_service_for_day call below, thus we don't need to increment the count. + notifications_to_seed = None + # Check if we have already seeded the annual limit counts for today + if current_app.config["FF_ANNUAL_LIMIT"]: + if not annual_limit_client.was_seeded_today(service_id): + annual_limit_client.set_seeded_at(service_id) + notifications_to_seed = fetch_notification_status_for_service_for_day( + convert_utc_to_local_timezone(datetime.utcnow()), + service_id=service_id, + ) + annual_limit_client.seed_annual_limit_notifications( + service_id, prepare_notification_counts_for_seeding(notifications_to_seed) + ) + if not aws_response_dict["success"]: current_app.logger.info( "SES delivery failed: notification id {} and reference {} has error found. Status {}".format( @@ -90,7 +111,9 @@ def process_ses_results(self, response): # noqa: C901 ) ) if current_app.config["FF_ANNUAL_LIMIT"]: - annual_limit_client.increment_email_failed(notification.service_id) + # Only increment if we didn't just seed. + if notifications_to_seed is None: + annual_limit_client.increment_email_failed(notification.service_id) current_app.logger.info( f"Incremented email_failed count in Redis. 
Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) @@ -99,7 +122,9 @@ def process_ses_results(self, response): # noqa: C901 "SES callback return status of {} for notification: {}".format(notification_status, notification.id) ) if current_app.config["FF_ANNUAL_LIMIT"]: - annual_limit_client.increment_email_delivered(notification.service_id) + # Only increment if we didn't just seed. + if notifications_to_seed is None: + annual_limit_client.increment_email_delivered(notification.service_id) current_app.logger.info( f"Incremented email_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) diff --git a/app/celery/process_sns_receipts_tasks.py b/app/celery/process_sns_receipts_tasks.py index 1ca9608a6a..326784f3e3 100644 --- a/app/celery/process_sns_receipts_tasks.py +++ b/app/celery/process_sns_receipts_tasks.py @@ -2,11 +2,15 @@ from flask import current_app, json from notifications_utils.statsd_decorators import statsd +from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy.orm.exc import NoResultFound from app import annual_limit_client, notify_celery, statsd_client from app.config import QueueNames from app.dao import notifications_dao +from app.dao.fact_notification_status_dao import ( + fetch_notification_status_for_service_for_day, +) from app.models import ( NOTIFICATION_DELIVERED, NOTIFICATION_PERMANENT_FAILURE, @@ -16,6 +20,7 @@ SNS_PROVIDER, ) from app.notifications.callbacks import _check_and_queue_callback_task +from app.utils import prepare_notification_counts_for_seeding from celery.exceptions import Retry @@ -64,6 +69,21 @@ def process_sns_results(self, response): provider_response=provider_response, ) + service_id = notification.service_id + # Flags if seeding has occurred. Since we seed after updating the notification status in the DB then the current notification + # is included in the fetch_notification_status_for_service_for_day call below, thus we don't need to increment the count. + notifications_to_seed = None + if current_app.config["FF_ANNUAL_LIMIT"]: + if not annual_limit_client.was_seeded_today(service_id): + annual_limit_client.set_seeded_at(service_id) + notifications_to_seed = fetch_notification_status_for_service_for_day( + convert_utc_to_local_timezone(datetime.utcnow()), + service_id=service_id, + ) + annual_limit_client.seed_annual_limit_notifications( + service_id, prepare_notification_counts_for_seeding(notifications_to_seed) + ) + if notification_status != NOTIFICATION_DELIVERED: current_app.logger.info( ( @@ -73,7 +93,9 @@ def process_sns_results(self, response): ) # TODO FF_ANNUAL_LIMIT removal if current_app.config["FF_ANNUAL_LIMIT"]: - annual_limit_client.increment_sms_failed(notification.service_id) + # Only increment if we didn't just seed. + if notifications_to_seed is None: + annual_limit_client.increment_sms_failed(notification.service_id) current_app.logger.info( f"Incremented sms_failed count in Redis. 
Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) @@ -81,7 +103,9 @@ def process_sns_results(self, response): current_app.logger.info(f"SNS callback return status of {notification_status} for notification: {notification.id}") # TODO FF_ANNUAL_LIMIT removal if current_app.config["FF_ANNUAL_LIMIT"]: - annual_limit_client.increment_sms_delivered(notification.service_id) + # Only increment if we didn't just seed. + if notifications_to_seed is None: + annual_limit_client.increment_sms_delivered(notification.service_id) current_app.logger.info( f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) diff --git a/app/celery/reporting_tasks.py b/app/celery/reporting_tasks.py index 8090d42bb7..c74b1de247 100644 --- a/app/celery/reporting_tasks.py +++ b/app/celery/reporting_tasks.py @@ -5,7 +5,7 @@ from notifications_utils.statsd_decorators import statsd from notifications_utils.timezones import convert_utc_to_local_timezone -from app import notify_celery +from app import annual_limit_client, notify_celery from app.config import QueueNames from app.cronitor import cronitor from app.dao.annual_limits_data_dao import get_previous_quarter, insert_quarter_data @@ -113,6 +113,10 @@ def create_nightly_notification_status_for_day(process_day): len(transit_data), process_day, chunk ) ) + # TODO: FF_ANNUAL_LIMIT removal + if current_app.config["FF_ANNUAL_LIMIT"]: + annual_limit_client.reset_all_notification_counts(chunk) + except Exception as e: current_app.logger.error( "create-nightly-notification-status-for-day task failed for day: {}, for service_ids: {}. 
Error: {}".format( diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index dc3ec5be7d..75e0ca1371 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -130,6 +130,18 @@ def update_fact_notification_status(data, process_day, service_ids=None): def fetch_notification_status_for_service_by_month(start_date, end_date, service_id): + filters = [ + FactNotificationStatus.service_id == service_id, + FactNotificationStatus.bst_date >= start_date.strftime("%Y-%m-%d"), + # This works only for timezones to the west of GMT + FactNotificationStatus.bst_date < end_date.strftime("%Y-%m-%d"), + FactNotificationStatus.key_type != KEY_TYPE_TEST, + ] + + # TODO FF_ANNUAL_LIMIT removal + if current_app.config["FF_ANNUAL_LIMIT"]: + filters.append(FactNotificationStatus.bst_date != datetime.utcnow().date().strftime("%Y-%m-%d")) + return ( db.session.query( func.date_trunc("month", FactNotificationStatus.bst_date).label("month"), @@ -137,13 +149,7 @@ def fetch_notification_status_for_service_by_month(start_date, end_date, service FactNotificationStatus.notification_status, func.sum(FactNotificationStatus.notification_count).label("count"), ) - .filter( - FactNotificationStatus.service_id == service_id, - FactNotificationStatus.bst_date >= start_date.strftime("%Y-%m-%d"), - # This works only for timezones to the west of GMT - FactNotificationStatus.bst_date < end_date.strftime("%Y-%m-%d"), - FactNotificationStatus.key_type != KEY_TYPE_TEST, - ) + .filter(*filters) .group_by( func.date_trunc("month", FactNotificationStatus.bst_date).label("month"), FactNotificationStatus.notification_type, diff --git a/app/service/rest.py b/app/service/rest.py index 84c2682820..ed3d9b7752 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -404,7 +404,10 @@ def create_api_key(service_id=None): unsigned_api_key = get_unsigned_secret(valid_api_key.id) # prefix the API key so they keys can be easily identified for security scanning - keydata = {"key": unsigned_api_key, "key_name": current_app.config["API_KEY_PREFIX"] + valid_api_key.name} + keydata = { + "key": unsigned_api_key, + "key_name": current_app.config["API_KEY_PREFIX"] + valid_api_key.name, + } return jsonify(data=keydata), 201 @@ -631,7 +634,8 @@ def get_monthly_notification_stats(service_id): statistics.add_monthly_notification_status_stats(data, stats) now = datetime.utcnow() - if end_date > now: + # TODO FF_ANNUAL_LIMIT removal + if not current_app.config["FF_ANNUAL_LIMIT"] and end_date > now: todays_deltas = fetch_notification_status_for_service_for_day(convert_utc_to_local_timezone(now), service_id=service_id) statistics.add_monthly_notification_status_stats(data, todays_deltas) diff --git a/app/utils.py b/app/utils.py index 1836093753..165991a71d 100644 --- a/app/utils.py +++ b/app/utils.py @@ -15,6 +15,16 @@ local_timezone = pytz.timezone(os.getenv("TIMEZONE", "America/Toronto")) +DELIVERED_STATUSES = ["delivered", "sent", "returned-letter"] +FAILURE_STATUSES = [ + "failed", + "temporary-failure", + "permanent-failure", + "technical-failure", + "virus-scan-failed", + "validation-failed", +] + def pagination_links(pagination, endpoint, **kwargs): if "page" in kwargs: @@ -221,3 +231,28 @@ def get_limit_reset_time_et() -> dict[str, str]: limit_reset_time_et = {"12hr": next_midnight_utc_in_et.strftime("%-I%p"), "24hr": next_midnight_utc_in_et.strftime("%H")} return limit_reset_time_et + + +def prepare_notification_counts_for_seeding(notification_counts: list) -> dict: 
+ """Utility method that transforms a list of notification counts into a dictionary, mapping notification counts by type and success/failure. + Used to seed notification counts in Redis for annual limits. + e.g. + ``` + [(datetime, 'email', 'sent', 1), + (datetime, 'sms', 'sent', 2)] + ``` + Becomes: + ``` + {'email_sent': 1, 'sms_sent': 2} + ``` + Args: + notification_counts (list): A list of tuples containing (date, notification_type, status, count) + + Returns: + dict: That acts as a mapping to build the notification counts in Redis + """ + return { + f"{notification_type}_{'delivered' if status in DELIVERED_STATUSES else 'failed'}": count + for _, notification_type, status, count in notification_counts + if status in DELIVERED_STATUSES or status in FAILURE_STATUSES + } diff --git a/poetry.lock b/poetry.lock index b2ee531e85..e1e1f9a2d4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2657,7 +2657,7 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "52.3.6" +version = "52.3.7" description = "Shared python code for Notification - Provides logging utils etc." optional = false python-versions = "~3.10.9" @@ -2693,8 +2693,8 @@ werkzeug = "3.0.4" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "52.3.6" -resolved_reference = "6a57a6b91cdfc18b823ad4476744515f89d4b7be" +reference = "52.3.7" +resolved_reference = "42ded1e06cfac4209c0dd9d3131cf202b8e210bd" [[package]] name = "ordered-set" @@ -4623,4 +4623,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "d6297cc2dcb00a6b90e6a5914a1f5357cce172e853e12f9d221375f276c516d3" +content-hash = "9f820c94df5fda8d9619bba5e0294cc0a8108e68d7bf73646e5e099157f1fe4a" diff --git a/pyproject.toml b/pyproject.toml index fc9abadfaa..4bce92347b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,7 +65,7 @@ Werkzeug = "3.0.4" MarkupSafe = "2.1.5" # REVIEW: v2 is using sha512 instead of sha1 by default (in v1) itsdangerous = "2.2.0" -notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.6" } +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.7" } # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 typing-extensions = "4.12.2" diff --git a/tests/app/celery/test_process_pinpoint_receipts_tasks.py b/tests/app/celery/test_process_pinpoint_receipts_tasks.py index 84c72c3615..ec30fbe563 100644 --- a/tests/app/celery/test_process_pinpoint_receipts_tasks.py +++ b/tests/app/celery/test_process_pinpoint_receipts_tasks.py @@ -281,75 +281,120 @@ def test_process_pinpoint_results_calls_service_callback(sample_template, notify ) -@pytest.mark.parametrize( - "provider_response", - [ - "Blocked as spam by phone carrier", - "Destination is on a blocked list", - "Invalid phone number", - "Message body is invalid", - "Phone carrier has blocked this message", - "Phone carrier is currently unreachable/unavailable", - "Phone has blocked SMS", - "Phone is on a blocked list", - "Phone is currently unreachable/unavailable", - "Phone number is opted out", - "This delivery would exceed max price", - "Unknown error attempting to reach phone", - ], -) -def test_process_pinpoint_results_should_increment_sms_failed_when_delivery_receipt_is_failure( - sample_sms_template_with_html, - notify_api, - mocker, - provider_response, -): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - mocker.patch("app.annual_limit_client.increment_sms_failed") - 
- notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="pinpoint", +class TestAnnualLimits: + @pytest.mark.parametrize( + "provider_response", + [ + "Blocked as spam by phone carrier", + "Destination is on a blocked list", + "Invalid phone number", + "Message body is invalid", + "Phone carrier has blocked this message", + "Phone carrier is currently unreachable/unavailable", + "Phone has blocked SMS", + "Phone is on a blocked list", + "Phone is currently unreachable/unavailable", + "Phone number is opted out", + "This delivery would exceed max price", + "Unknown error attempting to reach phone", + ], + ) + def test_process_pinpoint_results_should_increment_sms_failed_when_delivery_receipt_is_failure( + self, + sample_sms_template_with_html, + notify_api, + mocker, + provider_response, + ): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") + mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) + + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="pinpoint", + ) ) + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + process_pinpoint_results(pinpoint_failed_callback(reference="ref", provider_response=provider_response)) + annual_limit_client.increment_sms_failed.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_delivered.assert_not_called() + + @pytest.mark.parametrize( + "callback", + [ + (pinpoint_delivered_callback), + (pinpoint_shortcode_delivered_callback), + ], ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - process_pinpoint_results(pinpoint_failed_callback(reference="ref", provider_response=provider_response)) - annual_limit_client.increment_sms_failed.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_delivered.assert_not_called() - - -@pytest.mark.parametrize( - "callback", - [ - (pinpoint_delivered_callback), - (pinpoint_shortcode_delivered_callback), - ], -) -def test_process_pinpoint_results_should_increment_sms_delivered_when_delivery_receipt_is_success( - sample_sms_template_with_html, - notify_api, - mocker, - callback, -): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - mocker.patch("app.annual_limit_client.increment_sms_failed") - - notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="pinpoint", + def test_process_pinpoint_results_should_increment_sms_delivered_when_delivery_receipt_is_success( + self, + sample_sms_template_with_html, + notify_api, + mocker, + callback, + ): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") + mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) + + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="pinpoint", + ) ) + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + process_pinpoint_results(callback(reference="ref")) + 
annual_limit_client.increment_sms_delivered.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_failed.assert_not_called() + + @pytest.mark.parametrize( + "callback, provider_response", + [ + (pinpoint_delivered_callback, None), + (pinpoint_failed_callback, "Blocked as spam by phone carrier"), + (pinpoint_failed_callback, "Phone carrier is currently unreachable/unavailable"), + (pinpoint_failed_callback, "Phone is currently unreachable/unavailable"), + (pinpoint_failed_callback, "This is not a real response"), + ], ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - process_pinpoint_results(callback(reference="ref")) - annual_limit_client.increment_sms_delivered.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_failed.assert_not_called() + def test_process_pinpoint_results_seeds_annual_limit_notifications_when_not_seeded_today_and_doesnt_increment_when_seeding( + self, + callback, + provider_response, + sample_sms_template_with_html, + notify_api, + mocker, + ): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") + mocker.patch("app.annual_limit_client.was_seeded_today", return_value=False) + mocker.patch("app.annual_limit_client.set_seeded_at") + + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="pinpoint", + ) + ) + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + process_pinpoint_results( + callback(provider_response, reference="ref") if provider_response else callback(reference="ref") + ) + annual_limit_client.set_seeded_at.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_delivered.assert_not_called() + annual_limit_client.increment_sms_failed.assert_not_called() diff --git a/tests/app/celery/test_process_ses_receipts_tasks.py b/tests/app/celery/test_process_ses_receipts_tasks.py index 117a6a41e2..42e669282c 100644 --- a/tests/app/celery/test_process_ses_receipts_tasks.py +++ b/tests/app/celery/test_process_ses_receipts_tasks.py @@ -446,6 +446,7 @@ def test_ses_callback_should_increment_email_delivered_when_delivery_receipt_is_ ): mocker.patch("app.annual_limit_client.increment_email_delivered") mocker.patch("app.annual_limit_client.increment_email_failed") + mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) # TODO FF_ANNUAL_LIMIT removal with set_config(notify_api, "FF_ANNUAL_LIMIT", True): @@ -468,6 +469,7 @@ def test_ses_callback_should_increment_email_failed_when_delivery_receipt_is_fai ): mocker.patch("app.annual_limit_client.increment_email_failed") mocker.patch("app.annual_limit_client.increment_email_delivered") + mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) # TODO FF_ANNUAL_LIMIT removal with set_config(notify_api, "FF_ANNUAL_LIMIT", True): @@ -476,3 +478,39 @@ def test_ses_callback_should_increment_email_failed_when_delivery_receipt_is_fai assert process_ses_results(callback(reference="ref")) annual_limit_client.increment_email_failed.assert_called_once_with(sample_email_template.service_id) annual_limit_client.increment_email_delivered.assert_not_called() + + @pytest.mark.parametrize( + "callback", + [ + ses_notification_callback, + ses_hard_bounce_callback, + ses_soft_bounce_callback, + ], + ) + def 
test_process_ses_results_seeds_annual_limit_notifications_when_not_seeded_today_and_doesnt_increment_when_seeding( + self, + callback, + sample_email_template, + notify_api, + mocker, + ): + mocker.patch("app.annual_limit_client.increment_email_delivered") + mocker.patch("app.annual_limit_client.increment_email_failed") + mocker.patch("app.annual_limit_client.was_seeded_today", return_value=False) + mocker.patch("app.annual_limit_client.set_seeded_at") + + notification = save_notification( + create_notification( + sample_email_template, + reference="ref", + sent_at=datetime.utcnow(), + status="sending", + sent_by="ses", + ) + ) + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + process_ses_results(callback(reference="ref")) + annual_limit_client.set_seeded_at.assert_called_once_with(notification.service_id) + annual_limit_client.increment_email_delivered.assert_not_called() + annual_limit_client.increment_email_failed.assert_not_called() diff --git a/tests/app/celery/test_process_sns_receipts_tasks.py b/tests/app/celery/test_process_sns_receipts_tasks.py index c443b5d0d3..79cd534a3e 100644 --- a/tests/app/celery/test_process_sns_receipts_tasks.py +++ b/tests/app/celery/test_process_sns_receipts_tasks.py @@ -189,66 +189,111 @@ def test_process_sns_results_calls_service_callback(sample_template, notify_db_s statsd_client.incr.assert_any_call("callback.sns.delivered") updated_notification = get_notification_by_id(notification.id) signed_data = create_delivery_status_callback_data(updated_notification, callback_api) - send_mock.assert_called_once_with([str(notification.id), signed_data, notification.service_id], queue="service-callbacks") - - -def test_sns_callback_should_increment_sms_delivered_when_delivery_receipt_is_delivered( - sample_sms_template_with_html, notify_api, mocker -): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - mocker.patch("app.annual_limit_client.increment_sms_failed") - - notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="sns", + send_mock.assert_called_once_with( + [str(notification.id), signed_data, notification.service_id], + queue="service-callbacks", ) - ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - assert process_sns_results(sns_success_callback(reference="ref")) - annual_limit_client.increment_sms_delivered.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_failed.assert_not_called() +class TestAnnualLimit: + def test_sns_callback_should_increment_sms_delivered_when_delivery_receipt_is_delivered( + self, sample_sms_template_with_html, notify_api, mocker + ): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") + mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) + + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="sns", + ) + ) + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + assert process_sns_results(sns_success_callback(reference="ref")) -@pytest.mark.parametrize( - "provider_response", - [ - "Blocked as spam by phone carrier", - "Destination is on a blocked list", - "Invalid phone number", - "Message body is invalid", - "Phone 
carrier has blocked this message", - "Phone carrier is currently unreachable/unavailable", - "Phone has blocked SMS", - "Phone is on a blocked list", - "Phone is currently unreachable/unavailable", - "Phone number is opted out", - "This delivery would exceed max price", - "Unknown error attempting to reach phone", - ], -) -def test_sns_callback_should_increment_sms_failed_when_delivery_receipt_is_failure( - sample_sms_template_with_html, notify_api, mocker, provider_response -): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - mocker.patch("app.annual_limit_client.increment_sms_failed") + annual_limit_client.increment_sms_delivered.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_failed.assert_not_called() - notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="sns", + @pytest.mark.parametrize( + "provider_response", + [ + "Blocked as spam by phone carrier", + "Destination is on a blocked list", + "Invalid phone number", + "Message body is invalid", + "Phone carrier has blocked this message", + "Phone carrier is currently unreachable/unavailable", + "Phone has blocked SMS", + "Phone is on a blocked list", + "Phone is currently unreachable/unavailable", + "Phone number is opted out", + "This delivery would exceed max price", + "Unknown error attempting to reach phone", + ], + ) + def test_sns_callback_should_increment_sms_failed_when_delivery_receipt_is_failure( + self, sample_sms_template_with_html, notify_api, mocker, provider_response + ): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") + mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) + + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="sns", + ) ) + + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + assert process_sns_results(sns_failed_callback(reference="ref", provider_response=provider_response)) + annual_limit_client.increment_sms_failed.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_delivered.assert_not_called() + + @pytest.mark.parametrize( + "callback, provider_response", + [ + (sns_success_callback, None), + (sns_failed_callback, "Blocked as spam by phone carrier"), + (sns_failed_callback, "Phone carrier is currently unreachable/unavailable"), + (sns_failed_callback, "Phone is currently unreachable/unavailable"), + (sns_failed_callback, "This is not a real response"), + ], ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - assert process_sns_results(sns_failed_callback(reference="ref", provider_response=provider_response)) - annual_limit_client.increment_sms_failed.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_delivered.assert_not_called() + def test_process_sns_results_seeds_annual_limit_notifications_when_not_seeded_today_and_doesnt_increment_when_seeding( + self, + callback, + provider_response, + sample_sms_template_with_html, + notify_api, + mocker, + ): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") + mocker.patch("app.annual_limit_client.was_seeded_today", return_value=False) + 
mocker.patch("app.annual_limit_client.set_seeded_at") + + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="sns", + ) + ) + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + process_sns_results(callback(provider_response, reference="ref") if provider_response else callback(reference="ref")) + annual_limit_client.set_seeded_at.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_delivered.assert_not_called() + annual_limit_client.increment_sms_failed.assert_not_called() diff --git a/tests/app/celery/test_reporting_tasks.py b/tests/app/celery/test_reporting_tasks.py index 53ceb547ca..6b987fcb9a 100644 --- a/tests/app/celery/test_reporting_tasks.py +++ b/tests/app/celery/test_reporting_tasks.py @@ -1,3 +1,4 @@ +import uuid from datetime import date, datetime, timedelta from decimal import Decimal @@ -30,8 +31,10 @@ create_rate, create_service, create_template, + create_user, save_notification, ) +from tests.conftest import set_config def mocker_get_rate( @@ -584,6 +587,26 @@ def test_create_nightly_notification_status_for_day_respects_local_timezone( assert noti_status[0].notification_status == "created" +@freeze_time("2019-04-01T5:30") +def test_create_nightly_notification_status_for_day_clears_failed_delivered_notification_counts( + sample_template, notify_api, mocker +): + mock_reset_counts = mocker.patch("app.annual_limit_client.reset_all_notification_counts") + for i in range(39): + user = create_user(email=f"test{i}@test.ca", mobile_number=f"{i}234567890") + service = create_service(service_id=uuid.uuid4(), service_name=f"service{i}", user=user, email_from=f"best.email{i}") + template_sms = create_template(service=service) + template_email = create_template(service=service, template_type="email") + save_notification(create_notification(template_sms, status="sent", created_at=datetime(2019, 4, 1, 5, 0))) + save_notification(create_notification(template_email, status="sent", created_at=datetime(2019, 4, 1, 5, 0))) + save_notification(create_notification(template_sms, status="failed", created_at=datetime(2019, 4, 1, 5, 0))) + save_notification(create_notification(template_email, status="delivered", created_at=datetime(2019, 4, 1, 5, 0))) + + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + create_nightly_notification_status_for_day("2019-04-01") + assert mock_reset_counts.call_count == 2 + + class TestInsertQuarterData: def test_insert_quarter_data(self, notify_db_session): service_1 = create_service(service_name="service_1") diff --git a/tests/app/service/test_statistics_rest.py b/tests/app/service/test_statistics_rest.py index 907d2fb5d2..a978979248 100644 --- a/tests/app/service/test_statistics_rest.py +++ b/tests/app/service/test_statistics_rest.py @@ -20,6 +20,7 @@ create_template, save_notification, ) +from tests.conftest import set_config @freeze_time("2017-11-11 06:00") @@ -222,7 +223,7 @@ def test_get_monthly_notification_stats_returns_stats(admin_request, sample_serv @freeze_time("2016-06-05 00:00:00") # This test assumes the local timezone is EST -def test_get_monthly_notification_stats_combines_todays_data_and_historic_stats(admin_request, sample_template): +def test_get_monthly_notification_stats_combines_todays_data_and_historic_stats(admin_request, notify_api, sample_template): create_ft_notification_status(datetime(2016, 5, 1), template=sample_template, count=1) 
create_ft_notification_status( datetime(2016, 6, 1), @@ -237,27 +238,27 @@ def test_get_monthly_notification_stats_combines_todays_data_and_historic_stats( # this doesn't get returned in the stats because it is old - it should be in ft_notification_status by now save_notification(create_notification(sample_template, created_at=datetime(2016, 6, 4), status="sending")) - response = admin_request.get( - "service.get_monthly_notification_stats", - service_id=sample_template.service_id, - year=2016, - ) + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + response = admin_request.get( + "service.get_monthly_notification_stats", + service_id=sample_template.service_id, + year=2016, + ) - assert len(response["data"]) == 3 # apr, may, jun - assert response["data"]["2016-05"] == { - "sms": {"delivered": 1}, - "email": {}, - "letter": {}, - } - assert response["data"]["2016-06"] == { - "sms": { - # combines the stats from the historic ft_notification_status and the current notifications - "created": 3, - "delivered": 1, - }, - "email": {}, - "letter": {}, - } + assert len(response["data"]) == 3 # apr, may, jun + assert response["data"]["2016-05"] == { + "sms": {"delivered": 1}, + "email": {}, + "letter": {}, + } + assert response["data"]["2016-06"] == { + "sms": { + # combines the stats from the historic ft_notification_status and the current notifications + "created": 2, + }, + "email": {}, + "letter": {}, + } # This test assumes the local timezone is EST @@ -295,7 +296,7 @@ def test_get_monthly_notification_stats_checks_dates(admin_request, sample_servi assert response["data"]["2017-03"]["sms"] == {"delivered": 1} -def test_get_monthly_notification_stats_only_gets_for_one_service(admin_request, notify_db_session): +def test_get_monthly_notification_stats_only_gets_for_one_service(admin_request, notify_api, notify_db_session): services = [create_service(), create_service(service_name="2")] templates = [create_template(services[0]), create_template(services[1])] @@ -303,10 +304,11 @@ def test_get_monthly_notification_stats_only_gets_for_one_service(admin_request, create_ft_notification_status(datetime(2016, 6, 1), template=templates[0], notification_status="created") create_ft_notification_status(datetime(2016, 6, 1), template=templates[1], notification_status="delivered") - response = admin_request.get("service.get_monthly_notification_stats", service_id=services[0].id, year=2016) + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + response = admin_request.get("service.get_monthly_notification_stats", service_id=services[0].id, year=2016) - assert response["data"]["2016-06"] == { - "sms": {"created": 1}, - "email": {}, - "letter": {}, - } + assert response["data"]["2016-06"] == { + "sms": {"created": 1}, + "email": {}, + "letter": {}, + } From b93858563b7b3029e12b6163b16dcdae2be2c60e Mon Sep 17 00:00:00 2001 From: William B <7444334+whabanks@users.noreply.github.com> Date: Wed, 13 Nov 2024 17:32:55 -0400 Subject: [PATCH 02/10] Bump utils for annual limit client fixes (#2352) * bump utils for annual limit client fixes * update lock file --- .github/workflows/test.yaml | 2 +- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index bdffe264a4..43657d10c6 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -69,7 +69,7 @@ jobs: run: | cp -f .env.example .env - name: Checks for new endpoints against AWS WAF rules - uses: 
cds-snc/notification-utils/.github/actions/waffles@52.3.7 + uses: cds-snc/notification-utils/.github/actions/waffles@52.3.8 with: app-loc: '/github/workspace' app-libs: '/github/workspace/env/site-packages' diff --git a/poetry.lock b/poetry.lock index e1e1f9a2d4..5821f41653 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2657,7 +2657,7 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "52.3.7" +version = "52.3.8" description = "Shared python code for Notification - Provides logging utils etc." optional = false python-versions = "~3.10.9" @@ -2693,8 +2693,8 @@ werkzeug = "3.0.4" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "52.3.7" -resolved_reference = "42ded1e06cfac4209c0dd9d3131cf202b8e210bd" +reference = "52.3.8" +resolved_reference = "acfde00d2c7bb9713afeb3e67e41cf8bea988e10" [[package]] name = "ordered-set" @@ -4623,4 +4623,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "9f820c94df5fda8d9619bba5e0294cc0a8108e68d7bf73646e5e099157f1fe4a" +content-hash = "494b12d5034586897256f7b42d41a230813912256dc3ff96a4abf9995894c77f" diff --git a/pyproject.toml b/pyproject.toml index 4bce92347b..a6067ff908 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,7 +65,7 @@ Werkzeug = "3.0.4" MarkupSafe = "2.1.5" # REVIEW: v2 is using sha512 instead of sha1 by default (in v1) itsdangerous = "2.2.0" -notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.7" } +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.8" } # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 typing-extensions = "4.12.2" From e197b993208820a9fe91c00cca9ffa06521ade87 Mon Sep 17 00:00:00 2001 From: William B <7444334+whabanks@users.noreply.github.com> Date: Thu, 14 Nov 2024 11:27:08 -0400 Subject: [PATCH 03/10] Revert "Bump utils for annual limit client fixes (#2352)" (#2353) This reverts commit b93858563b7b3029e12b6163b16dcdae2be2c60e. --- .github/workflows/test.yaml | 2 +- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 43657d10c6..bdffe264a4 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -69,7 +69,7 @@ jobs: run: | cp -f .env.example .env - name: Checks for new endpoints against AWS WAF rules - uses: cds-snc/notification-utils/.github/actions/waffles@52.3.8 + uses: cds-snc/notification-utils/.github/actions/waffles@52.3.7 with: app-loc: '/github/workspace' app-libs: '/github/workspace/env/site-packages' diff --git a/poetry.lock b/poetry.lock index 5821f41653..e1e1f9a2d4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2657,7 +2657,7 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "52.3.8" +version = "52.3.7" description = "Shared python code for Notification - Provides logging utils etc." 
optional = false python-versions = "~3.10.9" @@ -2693,8 +2693,8 @@ werkzeug = "3.0.4" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "52.3.8" -resolved_reference = "acfde00d2c7bb9713afeb3e67e41cf8bea988e10" +reference = "52.3.7" +resolved_reference = "42ded1e06cfac4209c0dd9d3131cf202b8e210bd" [[package]] name = "ordered-set" @@ -4623,4 +4623,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "494b12d5034586897256f7b42d41a230813912256dc3ff96a4abf9995894c77f" +content-hash = "9f820c94df5fda8d9619bba5e0294cc0a8108e68d7bf73646e5e099157f1fe4a" diff --git a/pyproject.toml b/pyproject.toml index a6067ff908..4bce92347b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,7 +65,7 @@ Werkzeug = "3.0.4" MarkupSafe = "2.1.5" # REVIEW: v2 is using sha512 instead of sha1 by default (in v1) itsdangerous = "2.2.0" -notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.8" } +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.7" } # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 typing-extensions = "4.12.2" From ffa8dbdf06ba271ccb64c775beb6eb48cc632dc2 Mon Sep 17 00:00:00 2001 From: William B <7444334+whabanks@users.noreply.github.com> Date: Thu, 14 Nov 2024 11:35:23 -0400 Subject: [PATCH 04/10] Revert "Clear Redis notification counts during nightly fact job (#2344)" (#2354) This reverts commit ad21ce1b741537f6cb12e33599596ba69bb3f2b6. --- .github/workflows/test.yaml | 2 +- app/celery/process_pinpoint_receipts_tasks.py | 40 +--- app/celery/process_ses_receipts_tasks.py | 29 +-- app/celery/process_sns_receipts_tasks.py | 28 +-- app/celery/reporting_tasks.py | 6 +- app/dao/fact_notification_status_dao.py | 20 +- app/service/rest.py | 8 +- app/utils.py | 35 ---- poetry.lock | 8 +- pyproject.toml | 2 +- .../test_process_pinpoint_receipts_tasks.py | 181 +++++++----------- .../celery/test_process_ses_receipts_tasks.py | 38 ---- .../celery/test_process_sns_receipts_tasks.py | 157 ++++++--------- tests/app/celery/test_reporting_tasks.py | 23 --- tests/app/service/test_statistics_rest.py | 58 +++--- 15 files changed, 177 insertions(+), 458 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index bdffe264a4..037f12dbb5 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -69,7 +69,7 @@ jobs: run: | cp -f .env.example .env - name: Checks for new endpoints against AWS WAF rules - uses: cds-snc/notification-utils/.github/actions/waffles@52.3.7 + uses: cds-snc/notification-utils/.github/actions/waffles@52.3.6 with: app-loc: '/github/workspace' app-libs: '/github/workspace/env/site-packages' diff --git a/app/celery/process_pinpoint_receipts_tasks.py b/app/celery/process_pinpoint_receipts_tasks.py index 5edd796169..d1732c9b38 100644 --- a/app/celery/process_pinpoint_receipts_tasks.py +++ b/app/celery/process_pinpoint_receipts_tasks.py @@ -3,15 +3,11 @@ from flask import current_app, json from notifications_utils.statsd_decorators import statsd -from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy.orm.exc import NoResultFound from app import annual_limit_client, notify_celery, statsd_client from app.config import QueueNames from app.dao import notifications_dao -from app.dao.fact_notification_status_dao import ( - fetch_notification_status_for_service_for_day, -) from app.models import ( NOTIFICATION_DELIVERED, 
NOTIFICATION_PERMANENT_FAILURE, @@ -21,7 +17,6 @@ PINPOINT_PROVIDER, ) from app.notifications.callbacks import _check_and_queue_callback_task -from app.utils import prepare_notification_counts_for_seeding from celery.exceptions import Retry # Pinpoint receipts are of the form: @@ -111,22 +106,6 @@ def process_pinpoint_results(self, response): sms_origination_phone_number=origination_phone_number, ) - service_id = notification.service_id - # Flags if seeding has occurred. Since we seed after updating the notification status in the DB then the current notification - # is included in the fetch_notification_status_for_service_for_day call below, thus we don't need to increment the count. - notifications_to_seed = None - - if current_app.config["FF_ANNUAL_LIMIT"]: - if not annual_limit_client.was_seeded_today(service_id): - annual_limit_client.set_seeded_at(service_id) - notifications_to_seed = fetch_notification_status_for_service_for_day( - convert_utc_to_local_timezone(datetime.utcnow()), - service_id=service_id, - ) - annual_limit_client.seed_annual_limit_notifications( - service_id, prepare_notification_counts_for_seeding(notifications_to_seed) - ) - if notification_status != NOTIFICATION_DELIVERED: current_app.logger.info( ( @@ -136,34 +115,25 @@ def process_pinpoint_results(self, response): ) # TODO FF_ANNUAL_LIMIT removal if current_app.config["FF_ANNUAL_LIMIT"]: - # Only increment if we didn't just seed. - if notifications_to_seed is None: - annual_limit_client.increment_sms_failed(service_id) + annual_limit_client.increment_sms_failed(notification.service_id) current_app.logger.info( - f"Incremented sms_delivered count in Redis. Service: {service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(service_id)}" + f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) else: current_app.logger.info( f"Pinpoint callback return status of {notification_status} for notification: {notification.id}" ) - # TODO FF_ANNUAL_LIMIT removal if current_app.config["FF_ANNUAL_LIMIT"]: - # Only increment if we didn't just seed. - if notifications_to_seed is None: - annual_limit_client.increment_sms_delivered(service_id) + annual_limit_client.increment_sms_delivered(notification.service_id) current_app.logger.info( - f"Incremented sms_delivered count in Redis. Service: {service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(service_id)}" + f"Incremented sms_delivered count in Redis. 
Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) statsd_client.incr(f"callback.pinpoint.{notification_status}") if notification.sent_at: - statsd_client.timing_with_dates( - "callback.pinpoint.elapsed-time", - datetime.utcnow(), - notification.sent_at, - ) + statsd_client.timing_with_dates("callback.pinpoint.elapsed-time", datetime.utcnow(), notification.sent_at) _check_and_queue_callback_task(notification) diff --git a/app/celery/process_ses_receipts_tasks.py b/app/celery/process_ses_receipts_tasks.py index 5d2c29b133..86811bb720 100644 --- a/app/celery/process_ses_receipts_tasks.py +++ b/app/celery/process_ses_receipts_tasks.py @@ -2,15 +2,11 @@ from flask import current_app, json from notifications_utils.statsd_decorators import statsd -from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy.orm.exc import NoResultFound from app import annual_limit_client, bounce_rate_client, notify_celery, statsd_client from app.config import QueueNames from app.dao import notifications_dao -from app.dao.fact_notification_status_dao import ( - fetch_notification_status_for_service_for_day, -) from app.models import NOTIFICATION_DELIVERED, NOTIFICATION_PERMANENT_FAILURE from app.notifications.callbacks import _check_and_queue_callback_task from app.notifications.notifications_ses_callback import ( @@ -18,7 +14,6 @@ get_aws_responses, handle_complaint, ) -from app.utils import prepare_notification_counts_for_seeding from celery.exceptions import Retry @@ -88,22 +83,6 @@ def process_ses_results(self, response): # noqa: C901 bounce_response=aws_response_dict.get("bounce_response", None), ) - service_id = notification.service_id - # Flags if seeding has occurred. Since we seed after updating the notification status in the DB then the current notification - # is included in the fetch_notification_status_for_service_for_day call below, thus we don't need to increment the count. - notifications_to_seed = None - # Check if we have already seeded the annual limit counts for today - if current_app.config["FF_ANNUAL_LIMIT"]: - if not annual_limit_client.was_seeded_today(service_id): - annual_limit_client.set_seeded_at(service_id) - notifications_to_seed = fetch_notification_status_for_service_for_day( - convert_utc_to_local_timezone(datetime.utcnow()), - service_id=service_id, - ) - annual_limit_client.seed_annual_limit_notifications( - service_id, prepare_notification_counts_for_seeding(notifications_to_seed) - ) - if not aws_response_dict["success"]: current_app.logger.info( "SES delivery failed: notification id {} and reference {} has error found. Status {}".format( @@ -111,9 +90,7 @@ def process_ses_results(self, response): # noqa: C901 ) ) if current_app.config["FF_ANNUAL_LIMIT"]: - # Only increment if we didn't just seed. - if notifications_to_seed is None: - annual_limit_client.increment_email_failed(notification.service_id) + annual_limit_client.increment_email_failed(notification.service_id) current_app.logger.info( f"Incremented email_failed count in Redis. 
Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) @@ -122,9 +99,7 @@ def process_ses_results(self, response): # noqa: C901 "SES callback return status of {} for notification: {}".format(notification_status, notification.id) ) if current_app.config["FF_ANNUAL_LIMIT"]: - # Only increment if we didn't just seed. - if notifications_to_seed is None: - annual_limit_client.increment_email_delivered(notification.service_id) + annual_limit_client.increment_email_delivered(notification.service_id) current_app.logger.info( f"Incremented email_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) diff --git a/app/celery/process_sns_receipts_tasks.py b/app/celery/process_sns_receipts_tasks.py index 326784f3e3..1ca9608a6a 100644 --- a/app/celery/process_sns_receipts_tasks.py +++ b/app/celery/process_sns_receipts_tasks.py @@ -2,15 +2,11 @@ from flask import current_app, json from notifications_utils.statsd_decorators import statsd -from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy.orm.exc import NoResultFound from app import annual_limit_client, notify_celery, statsd_client from app.config import QueueNames from app.dao import notifications_dao -from app.dao.fact_notification_status_dao import ( - fetch_notification_status_for_service_for_day, -) from app.models import ( NOTIFICATION_DELIVERED, NOTIFICATION_PERMANENT_FAILURE, @@ -20,7 +16,6 @@ SNS_PROVIDER, ) from app.notifications.callbacks import _check_and_queue_callback_task -from app.utils import prepare_notification_counts_for_seeding from celery.exceptions import Retry @@ -69,21 +64,6 @@ def process_sns_results(self, response): provider_response=provider_response, ) - service_id = notification.service_id - # Flags if seeding has occurred. Since we seed after updating the notification status in the DB then the current notification - # is included in the fetch_notification_status_for_service_for_day call below, thus we don't need to increment the count. - notifications_to_seed = None - if current_app.config["FF_ANNUAL_LIMIT"]: - if not annual_limit_client.was_seeded_today(service_id): - annual_limit_client.set_seeded_at(service_id) - notifications_to_seed = fetch_notification_status_for_service_for_day( - convert_utc_to_local_timezone(datetime.utcnow()), - service_id=service_id, - ) - annual_limit_client.seed_annual_limit_notifications( - service_id, prepare_notification_counts_for_seeding(notifications_to_seed) - ) - if notification_status != NOTIFICATION_DELIVERED: current_app.logger.info( ( @@ -93,9 +73,7 @@ def process_sns_results(self, response): ) # TODO FF_ANNUAL_LIMIT removal if current_app.config["FF_ANNUAL_LIMIT"]: - # Only increment if we didn't just seed. - if notifications_to_seed is None: - annual_limit_client.increment_sms_failed(notification.service_id) + annual_limit_client.increment_sms_failed(notification.service_id) current_app.logger.info( f"Incremented sms_failed count in Redis. 
Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) @@ -103,9 +81,7 @@ def process_sns_results(self, response): current_app.logger.info(f"SNS callback return status of {notification_status} for notification: {notification.id}") # TODO FF_ANNUAL_LIMIT removal if current_app.config["FF_ANNUAL_LIMIT"]: - # Only increment if we didn't just seed. - if notifications_to_seed is None: - annual_limit_client.increment_sms_delivered(notification.service_id) + annual_limit_client.increment_sms_delivered(notification.service_id) current_app.logger.info( f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}" ) diff --git a/app/celery/reporting_tasks.py b/app/celery/reporting_tasks.py index c74b1de247..8090d42bb7 100644 --- a/app/celery/reporting_tasks.py +++ b/app/celery/reporting_tasks.py @@ -5,7 +5,7 @@ from notifications_utils.statsd_decorators import statsd from notifications_utils.timezones import convert_utc_to_local_timezone -from app import annual_limit_client, notify_celery +from app import notify_celery from app.config import QueueNames from app.cronitor import cronitor from app.dao.annual_limits_data_dao import get_previous_quarter, insert_quarter_data @@ -113,10 +113,6 @@ def create_nightly_notification_status_for_day(process_day): len(transit_data), process_day, chunk ) ) - # TODO: FF_ANNUAL_LIMIT removal - if current_app.config["FF_ANNUAL_LIMIT"]: - annual_limit_client.reset_all_notification_counts(chunk) - except Exception as e: current_app.logger.error( "create-nightly-notification-status-for-day task failed for day: {}, for service_ids: {}. 
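[Editor's note] For reference, the annual-limit guard that the SES and SNS hunks above strip out reduces to the sketch below. This is a condensed illustration only, written from the removed lines (was_seeded_today, set_seeded_at, seed_annual_limit_notifications, prepare_notification_counts_for_seeding); the real tasks also update the notification row, emit statsd metrics and queue service callbacks, and after this change they simply increment the Redis counters unconditionally.

    # Condensed sketch of the removed seed-or-increment guard; not production code.
    from datetime import datetime

    from notifications_utils.timezones import convert_utc_to_local_timezone

    from app import annual_limit_client
    from app.dao.fact_notification_status_dao import (
        fetch_notification_status_for_service_for_day,
    )
    from app.utils import prepare_notification_counts_for_seeding


    def record_sms_outcome(service_id, delivered):
        if not annual_limit_client.was_seeded_today(service_id):
            # First receipt of the day for this service: copy today's totals from
            # the database into Redis. The notification being processed is already
            # included in that snapshot, so the per-notification increment is
            # skipped on the seeding pass.
            annual_limit_client.set_seeded_at(service_id)
            counts = fetch_notification_status_for_service_for_day(
                convert_utc_to_local_timezone(datetime.utcnow()), service_id=service_id
            )
            annual_limit_client.seed_annual_limit_notifications(
                service_id, prepare_notification_counts_for_seeding(counts)
            )
            return
        if delivered:
            annual_limit_client.increment_sms_delivered(service_id)
        else:
            annual_limit_client.increment_sms_failed(service_id)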
Error: {}".format( diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 75e0ca1371..dc3ec5be7d 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -130,18 +130,6 @@ def update_fact_notification_status(data, process_day, service_ids=None): def fetch_notification_status_for_service_by_month(start_date, end_date, service_id): - filters = [ - FactNotificationStatus.service_id == service_id, - FactNotificationStatus.bst_date >= start_date.strftime("%Y-%m-%d"), - # This works only for timezones to the west of GMT - FactNotificationStatus.bst_date < end_date.strftime("%Y-%m-%d"), - FactNotificationStatus.key_type != KEY_TYPE_TEST, - ] - - # TODO FF_ANNUAL_LIMIT removal - if current_app.config["FF_ANNUAL_LIMIT"]: - filters.append(FactNotificationStatus.bst_date != datetime.utcnow().date().strftime("%Y-%m-%d")) - return ( db.session.query( func.date_trunc("month", FactNotificationStatus.bst_date).label("month"), @@ -149,7 +137,13 @@ def fetch_notification_status_for_service_by_month(start_date, end_date, service FactNotificationStatus.notification_status, func.sum(FactNotificationStatus.notification_count).label("count"), ) - .filter(*filters) + .filter( + FactNotificationStatus.service_id == service_id, + FactNotificationStatus.bst_date >= start_date.strftime("%Y-%m-%d"), + # This works only for timezones to the west of GMT + FactNotificationStatus.bst_date < end_date.strftime("%Y-%m-%d"), + FactNotificationStatus.key_type != KEY_TYPE_TEST, + ) .group_by( func.date_trunc("month", FactNotificationStatus.bst_date).label("month"), FactNotificationStatus.notification_type, diff --git a/app/service/rest.py b/app/service/rest.py index ed3d9b7752..84c2682820 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -404,10 +404,7 @@ def create_api_key(service_id=None): unsigned_api_key = get_unsigned_secret(valid_api_key.id) # prefix the API key so they keys can be easily identified for security scanning - keydata = { - "key": unsigned_api_key, - "key_name": current_app.config["API_KEY_PREFIX"] + valid_api_key.name, - } + keydata = {"key": unsigned_api_key, "key_name": current_app.config["API_KEY_PREFIX"] + valid_api_key.name} return jsonify(data=keydata), 201 @@ -634,8 +631,7 @@ def get_monthly_notification_stats(service_id): statistics.add_monthly_notification_status_stats(data, stats) now = datetime.utcnow() - # TODO FF_ANNUAL_LIMIT removal - if not current_app.config["FF_ANNUAL_LIMIT"] and end_date > now: + if end_date > now: todays_deltas = fetch_notification_status_for_service_for_day(convert_utc_to_local_timezone(now), service_id=service_id) statistics.add_monthly_notification_status_stats(data, todays_deltas) diff --git a/app/utils.py b/app/utils.py index 165991a71d..1836093753 100644 --- a/app/utils.py +++ b/app/utils.py @@ -15,16 +15,6 @@ local_timezone = pytz.timezone(os.getenv("TIMEZONE", "America/Toronto")) -DELIVERED_STATUSES = ["delivered", "sent", "returned-letter"] -FAILURE_STATUSES = [ - "failed", - "temporary-failure", - "permanent-failure", - "technical-failure", - "virus-scan-failed", - "validation-failed", -] - def pagination_links(pagination, endpoint, **kwargs): if "page" in kwargs: @@ -231,28 +221,3 @@ def get_limit_reset_time_et() -> dict[str, str]: limit_reset_time_et = {"12hr": next_midnight_utc_in_et.strftime("%-I%p"), "24hr": next_midnight_utc_in_et.strftime("%H")} return limit_reset_time_et - - -def prepare_notification_counts_for_seeding(notification_counts: list) -> dict: 
- """Utility method that transforms a list of notification counts into a dictionary, mapping notification counts by type and success/failure. - Used to seed notification counts in Redis for annual limits. - e.g. - ``` - [(datetime, 'email', 'sent', 1), - (datetime, 'sms', 'sent', 2)] - ``` - Becomes: - ``` - {'email_sent': 1, 'sms_sent': 2} - ``` - Args: - notification_counts (list): A list of tuples containing (date, notification_type, status, count) - - Returns: - dict: That acts as a mapping to build the notification counts in Redis - """ - return { - f"{notification_type}_{'delivered' if status in DELIVERED_STATUSES else 'failed'}": count - for _, notification_type, status, count in notification_counts - if status in DELIVERED_STATUSES or status in FAILURE_STATUSES - } diff --git a/poetry.lock b/poetry.lock index e1e1f9a2d4..b2ee531e85 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2657,7 +2657,7 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "52.3.7" +version = "52.3.6" description = "Shared python code for Notification - Provides logging utils etc." optional = false python-versions = "~3.10.9" @@ -2693,8 +2693,8 @@ werkzeug = "3.0.4" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "52.3.7" -resolved_reference = "42ded1e06cfac4209c0dd9d3131cf202b8e210bd" +reference = "52.3.6" +resolved_reference = "6a57a6b91cdfc18b823ad4476744515f89d4b7be" [[package]] name = "ordered-set" @@ -4623,4 +4623,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "9f820c94df5fda8d9619bba5e0294cc0a8108e68d7bf73646e5e099157f1fe4a" +content-hash = "d6297cc2dcb00a6b90e6a5914a1f5357cce172e853e12f9d221375f276c516d3" diff --git a/pyproject.toml b/pyproject.toml index 4bce92347b..fc9abadfaa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,7 +65,7 @@ Werkzeug = "3.0.4" MarkupSafe = "2.1.5" # REVIEW: v2 is using sha512 instead of sha1 by default (in v1) itsdangerous = "2.2.0" -notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.7" } +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.6" } # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 typing-extensions = "4.12.2" diff --git a/tests/app/celery/test_process_pinpoint_receipts_tasks.py b/tests/app/celery/test_process_pinpoint_receipts_tasks.py index ec30fbe563..84c72c3615 100644 --- a/tests/app/celery/test_process_pinpoint_receipts_tasks.py +++ b/tests/app/celery/test_process_pinpoint_receipts_tasks.py @@ -281,120 +281,75 @@ def test_process_pinpoint_results_calls_service_callback(sample_template, notify ) -class TestAnnualLimits: - @pytest.mark.parametrize( - "provider_response", - [ - "Blocked as spam by phone carrier", - "Destination is on a blocked list", - "Invalid phone number", - "Message body is invalid", - "Phone carrier has blocked this message", - "Phone carrier is currently unreachable/unavailable", - "Phone has blocked SMS", - "Phone is on a blocked list", - "Phone is currently unreachable/unavailable", - "Phone number is opted out", - "This delivery would exceed max price", - "Unknown error attempting to reach phone", - ], - ) - def test_process_pinpoint_results_should_increment_sms_failed_when_delivery_receipt_is_failure( - self, - sample_sms_template_with_html, - notify_api, - mocker, - provider_response, - ): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - 
mocker.patch("app.annual_limit_client.increment_sms_failed") - mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) - - notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="pinpoint", - ) +@pytest.mark.parametrize( + "provider_response", + [ + "Blocked as spam by phone carrier", + "Destination is on a blocked list", + "Invalid phone number", + "Message body is invalid", + "Phone carrier has blocked this message", + "Phone carrier is currently unreachable/unavailable", + "Phone has blocked SMS", + "Phone is on a blocked list", + "Phone is currently unreachable/unavailable", + "Phone number is opted out", + "This delivery would exceed max price", + "Unknown error attempting to reach phone", + ], +) +def test_process_pinpoint_results_should_increment_sms_failed_when_delivery_receipt_is_failure( + sample_sms_template_with_html, + notify_api, + mocker, + provider_response, +): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") + + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="pinpoint", ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - process_pinpoint_results(pinpoint_failed_callback(reference="ref", provider_response=provider_response)) - annual_limit_client.increment_sms_failed.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_delivered.assert_not_called() - - @pytest.mark.parametrize( - "callback", - [ - (pinpoint_delivered_callback), - (pinpoint_shortcode_delivered_callback), - ], ) - def test_process_pinpoint_results_should_increment_sms_delivered_when_delivery_receipt_is_success( - self, - sample_sms_template_with_html, - notify_api, - mocker, - callback, - ): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - mocker.patch("app.annual_limit_client.increment_sms_failed") - mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) - - notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="pinpoint", - ) + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + process_pinpoint_results(pinpoint_failed_callback(reference="ref", provider_response=provider_response)) + annual_limit_client.increment_sms_failed.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_delivered.assert_not_called() + + +@pytest.mark.parametrize( + "callback", + [ + (pinpoint_delivered_callback), + (pinpoint_shortcode_delivered_callback), + ], +) +def test_process_pinpoint_results_should_increment_sms_delivered_when_delivery_receipt_is_success( + sample_sms_template_with_html, + notify_api, + mocker, + callback, +): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") + + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="pinpoint", ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - process_pinpoint_results(callback(reference="ref")) - 
annual_limit_client.increment_sms_delivered.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_failed.assert_not_called() - - @pytest.mark.parametrize( - "callback, provider_response", - [ - (pinpoint_delivered_callback, None), - (pinpoint_failed_callback, "Blocked as spam by phone carrier"), - (pinpoint_failed_callback, "Phone carrier is currently unreachable/unavailable"), - (pinpoint_failed_callback, "Phone is currently unreachable/unavailable"), - (pinpoint_failed_callback, "This is not a real response"), - ], ) - def test_process_pinpoint_results_seeds_annual_limit_notifications_when_not_seeded_today_and_doesnt_increment_when_seeding( - self, - callback, - provider_response, - sample_sms_template_with_html, - notify_api, - mocker, - ): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - mocker.patch("app.annual_limit_client.increment_sms_failed") - mocker.patch("app.annual_limit_client.was_seeded_today", return_value=False) - mocker.patch("app.annual_limit_client.set_seeded_at") - - notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="pinpoint", - ) - ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - process_pinpoint_results( - callback(provider_response, reference="ref") if provider_response else callback(reference="ref") - ) - annual_limit_client.set_seeded_at.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_delivered.assert_not_called() - annual_limit_client.increment_sms_failed.assert_not_called() + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + process_pinpoint_results(callback(reference="ref")) + annual_limit_client.increment_sms_delivered.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_failed.assert_not_called() diff --git a/tests/app/celery/test_process_ses_receipts_tasks.py b/tests/app/celery/test_process_ses_receipts_tasks.py index 42e669282c..117a6a41e2 100644 --- a/tests/app/celery/test_process_ses_receipts_tasks.py +++ b/tests/app/celery/test_process_ses_receipts_tasks.py @@ -446,7 +446,6 @@ def test_ses_callback_should_increment_email_delivered_when_delivery_receipt_is_ ): mocker.patch("app.annual_limit_client.increment_email_delivered") mocker.patch("app.annual_limit_client.increment_email_failed") - mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) # TODO FF_ANNUAL_LIMIT removal with set_config(notify_api, "FF_ANNUAL_LIMIT", True): @@ -469,7 +468,6 @@ def test_ses_callback_should_increment_email_failed_when_delivery_receipt_is_fai ): mocker.patch("app.annual_limit_client.increment_email_failed") mocker.patch("app.annual_limit_client.increment_email_delivered") - mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) # TODO FF_ANNUAL_LIMIT removal with set_config(notify_api, "FF_ANNUAL_LIMIT", True): @@ -478,39 +476,3 @@ def test_ses_callback_should_increment_email_failed_when_delivery_receipt_is_fai assert process_ses_results(callback(reference="ref")) annual_limit_client.increment_email_failed.assert_called_once_with(sample_email_template.service_id) annual_limit_client.increment_email_delivered.assert_not_called() - - @pytest.mark.parametrize( - "callback", - [ - ses_notification_callback, - ses_hard_bounce_callback, - ses_soft_bounce_callback, - ], - ) - def 
test_process_ses_results_seeds_annual_limit_notifications_when_not_seeded_today_and_doesnt_increment_when_seeding( - self, - callback, - sample_email_template, - notify_api, - mocker, - ): - mocker.patch("app.annual_limit_client.increment_email_delivered") - mocker.patch("app.annual_limit_client.increment_email_failed") - mocker.patch("app.annual_limit_client.was_seeded_today", return_value=False) - mocker.patch("app.annual_limit_client.set_seeded_at") - - notification = save_notification( - create_notification( - sample_email_template, - reference="ref", - sent_at=datetime.utcnow(), - status="sending", - sent_by="ses", - ) - ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - process_ses_results(callback(reference="ref")) - annual_limit_client.set_seeded_at.assert_called_once_with(notification.service_id) - annual_limit_client.increment_email_delivered.assert_not_called() - annual_limit_client.increment_email_failed.assert_not_called() diff --git a/tests/app/celery/test_process_sns_receipts_tasks.py b/tests/app/celery/test_process_sns_receipts_tasks.py index 79cd534a3e..c443b5d0d3 100644 --- a/tests/app/celery/test_process_sns_receipts_tasks.py +++ b/tests/app/celery/test_process_sns_receipts_tasks.py @@ -189,111 +189,66 @@ def test_process_sns_results_calls_service_callback(sample_template, notify_db_s statsd_client.incr.assert_any_call("callback.sns.delivered") updated_notification = get_notification_by_id(notification.id) signed_data = create_delivery_status_callback_data(updated_notification, callback_api) - send_mock.assert_called_once_with( - [str(notification.id), signed_data, notification.service_id], - queue="service-callbacks", - ) + send_mock.assert_called_once_with([str(notification.id), signed_data, notification.service_id], queue="service-callbacks") + +def test_sns_callback_should_increment_sms_delivered_when_delivery_receipt_is_delivered( + sample_sms_template_with_html, notify_api, mocker +): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") -class TestAnnualLimit: - def test_sns_callback_should_increment_sms_delivered_when_delivery_receipt_is_delivered( - self, sample_sms_template_with_html, notify_api, mocker - ): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - mocker.patch("app.annual_limit_client.increment_sms_failed") - mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) - - notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="sns", - ) + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="sns", ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - assert process_sns_results(sns_success_callback(reference="ref")) + ) + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + assert process_sns_results(sns_success_callback(reference="ref")) - annual_limit_client.increment_sms_delivered.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_failed.assert_not_called() + annual_limit_client.increment_sms_delivered.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_failed.assert_not_called() - @pytest.mark.parametrize( - 
"provider_response", - [ - "Blocked as spam by phone carrier", - "Destination is on a blocked list", - "Invalid phone number", - "Message body is invalid", - "Phone carrier has blocked this message", - "Phone carrier is currently unreachable/unavailable", - "Phone has blocked SMS", - "Phone is on a blocked list", - "Phone is currently unreachable/unavailable", - "Phone number is opted out", - "This delivery would exceed max price", - "Unknown error attempting to reach phone", - ], - ) - def test_sns_callback_should_increment_sms_failed_when_delivery_receipt_is_failure( - self, sample_sms_template_with_html, notify_api, mocker, provider_response - ): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - mocker.patch("app.annual_limit_client.increment_sms_failed") - mocker.patch("app.annual_limit_client.was_seeded_today", return_value=True) - - notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="sns", - ) - ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - assert process_sns_results(sns_failed_callback(reference="ref", provider_response=provider_response)) - annual_limit_client.increment_sms_failed.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_delivered.assert_not_called() - - @pytest.mark.parametrize( - "callback, provider_response", - [ - (sns_success_callback, None), - (sns_failed_callback, "Blocked as spam by phone carrier"), - (sns_failed_callback, "Phone carrier is currently unreachable/unavailable"), - (sns_failed_callback, "Phone is currently unreachable/unavailable"), - (sns_failed_callback, "This is not a real response"), - ], - ) - def test_process_sns_results_seeds_annual_limit_notifications_when_not_seeded_today_and_doesnt_increment_when_seeding( - self, - callback, - provider_response, - sample_sms_template_with_html, - notify_api, - mocker, - ): - mocker.patch("app.annual_limit_client.increment_sms_delivered") - mocker.patch("app.annual_limit_client.increment_sms_failed") - mocker.patch("app.annual_limit_client.was_seeded_today", return_value=False) - mocker.patch("app.annual_limit_client.set_seeded_at") - - notification = save_notification( - create_notification( - sample_sms_template_with_html, - reference="ref", - sent_at=datetime.utcnow(), - status=NOTIFICATION_SENT, - sent_by="sns", - ) +@pytest.mark.parametrize( + "provider_response", + [ + "Blocked as spam by phone carrier", + "Destination is on a blocked list", + "Invalid phone number", + "Message body is invalid", + "Phone carrier has blocked this message", + "Phone carrier is currently unreachable/unavailable", + "Phone has blocked SMS", + "Phone is on a blocked list", + "Phone is currently unreachable/unavailable", + "Phone number is opted out", + "This delivery would exceed max price", + "Unknown error attempting to reach phone", + ], +) +def test_sns_callback_should_increment_sms_failed_when_delivery_receipt_is_failure( + sample_sms_template_with_html, notify_api, mocker, provider_response +): + mocker.patch("app.annual_limit_client.increment_sms_delivered") + mocker.patch("app.annual_limit_client.increment_sms_failed") + + notification = save_notification( + create_notification( + sample_sms_template_with_html, + reference="ref", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="sns", ) - # TODO FF_ANNUAL_LIMIT removal - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - 
process_sns_results(callback(provider_response, reference="ref") if provider_response else callback(reference="ref")) - annual_limit_client.set_seeded_at.assert_called_once_with(notification.service_id) - annual_limit_client.increment_sms_delivered.assert_not_called() - annual_limit_client.increment_sms_failed.assert_not_called() + ) + # TODO FF_ANNUAL_LIMIT removal + with set_config(notify_api, "FF_ANNUAL_LIMIT", True): + assert process_sns_results(sns_failed_callback(reference="ref", provider_response=provider_response)) + annual_limit_client.increment_sms_failed.assert_called_once_with(notification.service_id) + annual_limit_client.increment_sms_delivered.assert_not_called() diff --git a/tests/app/celery/test_reporting_tasks.py b/tests/app/celery/test_reporting_tasks.py index 6b987fcb9a..53ceb547ca 100644 --- a/tests/app/celery/test_reporting_tasks.py +++ b/tests/app/celery/test_reporting_tasks.py @@ -1,4 +1,3 @@ -import uuid from datetime import date, datetime, timedelta from decimal import Decimal @@ -31,10 +30,8 @@ create_rate, create_service, create_template, - create_user, save_notification, ) -from tests.conftest import set_config def mocker_get_rate( @@ -587,26 +584,6 @@ def test_create_nightly_notification_status_for_day_respects_local_timezone( assert noti_status[0].notification_status == "created" -@freeze_time("2019-04-01T5:30") -def test_create_nightly_notification_status_for_day_clears_failed_delivered_notification_counts( - sample_template, notify_api, mocker -): - mock_reset_counts = mocker.patch("app.annual_limit_client.reset_all_notification_counts") - for i in range(39): - user = create_user(email=f"test{i}@test.ca", mobile_number=f"{i}234567890") - service = create_service(service_id=uuid.uuid4(), service_name=f"service{i}", user=user, email_from=f"best.email{i}") - template_sms = create_template(service=service) - template_email = create_template(service=service, template_type="email") - save_notification(create_notification(template_sms, status="sent", created_at=datetime(2019, 4, 1, 5, 0))) - save_notification(create_notification(template_email, status="sent", created_at=datetime(2019, 4, 1, 5, 0))) - save_notification(create_notification(template_sms, status="failed", created_at=datetime(2019, 4, 1, 5, 0))) - save_notification(create_notification(template_email, status="delivered", created_at=datetime(2019, 4, 1, 5, 0))) - - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - create_nightly_notification_status_for_day("2019-04-01") - assert mock_reset_counts.call_count == 2 - - class TestInsertQuarterData: def test_insert_quarter_data(self, notify_db_session): service_1 = create_service(service_name="service_1") diff --git a/tests/app/service/test_statistics_rest.py b/tests/app/service/test_statistics_rest.py index a978979248..907d2fb5d2 100644 --- a/tests/app/service/test_statistics_rest.py +++ b/tests/app/service/test_statistics_rest.py @@ -20,7 +20,6 @@ create_template, save_notification, ) -from tests.conftest import set_config @freeze_time("2017-11-11 06:00") @@ -223,7 +222,7 @@ def test_get_monthly_notification_stats_returns_stats(admin_request, sample_serv @freeze_time("2016-06-05 00:00:00") # This test assumes the local timezone is EST -def test_get_monthly_notification_stats_combines_todays_data_and_historic_stats(admin_request, notify_api, sample_template): +def test_get_monthly_notification_stats_combines_todays_data_and_historic_stats(admin_request, sample_template): create_ft_notification_status(datetime(2016, 5, 1), template=sample_template, 
count=1) create_ft_notification_status( datetime(2016, 6, 1), @@ -238,27 +237,27 @@ def test_get_monthly_notification_stats_combines_todays_data_and_historic_stats( # this doesn't get returned in the stats because it is old - it should be in ft_notification_status by now save_notification(create_notification(sample_template, created_at=datetime(2016, 6, 4), status="sending")) - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - response = admin_request.get( - "service.get_monthly_notification_stats", - service_id=sample_template.service_id, - year=2016, - ) + response = admin_request.get( + "service.get_monthly_notification_stats", + service_id=sample_template.service_id, + year=2016, + ) - assert len(response["data"]) == 3 # apr, may, jun - assert response["data"]["2016-05"] == { - "sms": {"delivered": 1}, - "email": {}, - "letter": {}, - } - assert response["data"]["2016-06"] == { - "sms": { - # combines the stats from the historic ft_notification_status and the current notifications - "created": 2, - }, - "email": {}, - "letter": {}, - } + assert len(response["data"]) == 3 # apr, may, jun + assert response["data"]["2016-05"] == { + "sms": {"delivered": 1}, + "email": {}, + "letter": {}, + } + assert response["data"]["2016-06"] == { + "sms": { + # combines the stats from the historic ft_notification_status and the current notifications + "created": 3, + "delivered": 1, + }, + "email": {}, + "letter": {}, + } # This test assumes the local timezone is EST @@ -296,7 +295,7 @@ def test_get_monthly_notification_stats_checks_dates(admin_request, sample_servi assert response["data"]["2017-03"]["sms"] == {"delivered": 1} -def test_get_monthly_notification_stats_only_gets_for_one_service(admin_request, notify_api, notify_db_session): +def test_get_monthly_notification_stats_only_gets_for_one_service(admin_request, notify_db_session): services = [create_service(), create_service(service_name="2")] templates = [create_template(services[0]), create_template(services[1])] @@ -304,11 +303,10 @@ def test_get_monthly_notification_stats_only_gets_for_one_service(admin_request, create_ft_notification_status(datetime(2016, 6, 1), template=templates[0], notification_status="created") create_ft_notification_status(datetime(2016, 6, 1), template=templates[1], notification_status="delivered") - with set_config(notify_api, "FF_ANNUAL_LIMIT", True): - response = admin_request.get("service.get_monthly_notification_stats", service_id=services[0].id, year=2016) + response = admin_request.get("service.get_monthly_notification_stats", service_id=services[0].id, year=2016) - assert response["data"]["2016-06"] == { - "sms": {"created": 1}, - "email": {}, - "letter": {}, - } + assert response["data"]["2016-06"] == { + "sms": {"created": 1}, + "email": {}, + "letter": {}, + } From 9f745f9110f1491c3ceab46034c193326638ccd2 Mon Sep 17 00:00:00 2001 From: Jimmy Royer Date: Thu, 14 Nov 2024 15:15:15 -0500 Subject: [PATCH 05/10] Accommodate slower NewRelic initialization (+goodies) (#2351) * Bringing in changes from newrelic slower initialization work sessions * Fix the unavail POETRY_VENV_PATH env var to the vscode launchers * Upgrading to NewRelic 9.1.1 and locking dependencies * Simplifying launcher; it won't work for now but we'll look at it later * Remove .gitattributes from being git managed --- .devcontainer/Dockerfile | 6 ++ .devcontainer/devcontainer.json | 3 +- .devcontainer/docker-compose.yml | 4 - .../scripts/notify-dev-entrypoint.sh | 4 + .dockerignore | 3 +- .vscode/launch.json | 44 ++++++++++- 
application.py | 2 +- gunicorn_config.py | 19 ++++- newrelic.ini | 3 + poetry.lock | 79 +++++++++++++++---- pyproject.toml | 4 +- 11 files changed, 138 insertions(+), 33 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 84a565d3de..0532009828 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -11,13 +11,17 @@ RUN apt-get update \ 2>&1 \ && apt-get -y install \ curl \ + dbus-x11 \ dnsutils \ emacs \ exa \ fd-find \ + fzf \ git \ + graphviz \ iproute2 \ iputils-ping \ + kcachegrind \ less \ libsodium-dev \ lsb-release \ @@ -28,6 +32,8 @@ RUN apt-get update \ npm \ openssh-client \ procps \ + pyprof2calltree \ + ripgrep \ sudo \ tldr \ unzip \ diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 243b733518..1e9bc814ab 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -25,7 +25,8 @@ "eamodio.gitlens", "fill-labs.dependi", "GitHub.copilot", - "GitHub.copilot-labs", + "github.copilot-chat", + "github.vscode-pull-request-github", "googlecloudtools.cloudcode", "kaiwood.center-editor-window", "matangover.mypy", diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index fe6ff395e3..50d2f7fd71 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -12,10 +12,6 @@ services: volumes: - ..:/workspace:cached command: sleep infinity - ports: - - 8000:8000 - - 8001:8001 - - 6011:6011 links: - db expose: diff --git a/.devcontainer/scripts/notify-dev-entrypoint.sh b/.devcontainer/scripts/notify-dev-entrypoint.sh index 6db49cc637..e9bc5df109 100755 --- a/.devcontainer/scripts/notify-dev-entrypoint.sh +++ b/.devcontainer/scripts/notify-dev-entrypoint.sh @@ -30,6 +30,10 @@ echo -e "alias smoke-staging='cd /workspace && cp .env_smoke_staging tests_smoke echo -e "alias smoke-prod='cd /workspace && cp .env_smoke_prod tests_smoke/.env && poetry run make smoke-test'" >> ~/.zshrc echo -e "alias smoke-dev='cd /workspace && cp .env_smoke_dev tests_smoke/.env && poetry run make smoke-test'" >> ~/.zshrc +echo -e "# fzf key bindings and completion" >> ~/.zshrc +echo -e "source /usr/share/doc/fzf/examples/key-bindings.zsh" >> ~/.zshrc +echo -e "source /usr/share/doc/fzf/examples/completion.zsh" >> ~/.zshrc + cd /workspace # Poetry autocomplete diff --git a/.dockerignore b/.dockerignore index 4f509e525f..d8d4252864 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1 +1,2 @@ -*.env \ No newline at end of file +*.env +tests* \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index 711f43a9c6..a4b6a47d81 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -3,14 +3,28 @@ "configurations": [ { "name": "Python: current file", - "type": "python", + "type": "debugpy", "request": "launch", "program": "${file}", "console": "integratedTerminal" }, + { + "name": "Python: Gunicorn", + "type": "debugpy", + "request": "launch", + "program": "gunicorn", + "gevent": true, + "args": ["--config", "gunicorn_config.py", "application"], + "env": { + "FLASK_APP": "application.py", + "FLASK_ENV": "development" + }, + "justMyCode": false, + "console": "integratedTerminal" + }, { "name": "Python: Flask", - "type": "python", + "type": "debugpy", "request": "launch", "module": "flask", "env": { @@ -28,7 +42,7 @@ }, { "name": "Python: Celery", - "type": "python", + "type": "debugpy", "request": "launch", "module": "celery", "console": "integratedTerminal", @@ -48,7 +62,7 @@ }, { "name": "Locust", - "type": "python", + "type": 
"debugpy", "request": "launch", "module": "locust", "args": [ @@ -64,6 +78,28 @@ ], "gevent": true, "console": "integratedTerminal" + }, + { + "name": "Python: Current File with profiler", + "type": "debugpy", + "request": "launch", + "module": "cProfile", + "env": { + "FLASK_APP": "application.py", + "FLASK_ENV": "development" + }, + "args": [ + "-o", + "/tmp/tmp.prof", + "${file}", + "flask", + "run", + "--no-debugger", + "-p 6011", + "--host=0.0.0.0" + ], + "jinja": true, + "justMyCode": false } ] } \ No newline at end of file diff --git a/application.py b/application.py index 7f554ecd3f..ce8d1d9276 100644 --- a/application.py +++ b/application.py @@ -42,6 +42,6 @@ def handler(event, context): - newrelic.agent.initialize() # noqa: E402 + newrelic.agent.initialize(environment=app.config["NOTIFY_ENVIRONMENT"]) # noqa: E402 newrelic.agent.register_application(timeout=20.0) return apig_wsgi_handler(event, context) diff --git a/gunicorn_config.py b/gunicorn_config.py index e43a7cb288..814e477ea5 100644 --- a/gunicorn_config.py +++ b/gunicorn_config.py @@ -5,7 +5,7 @@ import gunicorn # type: ignore import newrelic.agent # See https://bit.ly/2xBVKBH -newrelic.agent.initialize() # noqa: E402 +newrelic.agent.initialize(environment=os.getenv("NOTIFY_ENVIRONMENT")) # noqa: E402 workers = 4 worker_class = "gevent" @@ -25,15 +25,26 @@ keepalive = 75 # The default graceful timeout period for Kubernetes is 30 seconds, so - # want a lower graceful timeout value for gunicorn so that proper instance - # shutdowns. + # make sure that the timeouts defined here are less than the configured + # Kubernetes timeout. This ensures that the gunicorn worker will exit + # before the Kubernetes pod is terminated. This is important because + # Kubernetes will send a SIGKILL to the pod if it does not terminate + # within the grace period. If the worker is still processing requests + # when it receives the SIGKILL, it will be terminated abruptly and + # will not be able to finish processing the request. This can lead to + # 502 errors being returned to the client. + # + # Also, some libraries such as NewRelic might need some time to finish + # initialization before the worker can start processing requests. The + # timeout values should consider these factors. 
# # Gunicorn config: # https://docs.gunicorn.org/en/stable/settings.html#graceful-timeout # # Kubernetes config: # https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/ - graceful_timeout = 20 + graceful_timeout = 25 + timeout = 30 def on_starting(server): diff --git a/newrelic.ini b/newrelic.ini index eddd873bfd..f8eb87bd36 100644 --- a/newrelic.ini +++ b/newrelic.ini @@ -192,10 +192,12 @@ error_collector.ignore_errors = app.v2.errors:BadRequestError jsonschema.excepti [newrelic:development] # monitor_mode = false +log_level = debug [newrelic:staging] # app_name = Python Application (Staging) # monitor_mode = true +log_level = debug [newrelic:production] # monitor_mode = true @@ -205,5 +207,6 @@ error_collector.ignore_errors = app.v2.errors:BadRequestError jsonschema.excepti [newrelic:dev] # monitor_mode = false +log_level = debug # --------------------------------------------------------------------------- diff --git a/poetry.lock b/poetry.lock index b2ee531e85..afe7624cec 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1647,6 +1647,17 @@ benchmarks = ["httplib2", "httpx", "requests", "urllib3"] dev = ["dpkt", "pytest", "requests"] examples = ["oauth2"] +[[package]] +name = "gprof2dot" +version = "2024.6.6" +description = "Generate a dot graph from the output of several profilers." +optional = false +python-versions = ">=3.8" +files = [ + {file = "gprof2dot-2024.6.6-py2.py3-none-any.whl", hash = "sha256:45b14ad7ce64e299c8f526881007b9eb2c6b75505d5613e96e66ee4d5ab33696"}, + {file = "gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab"}, +] + [[package]] name = "greenlet" version = "2.0.2" @@ -2604,26 +2615,26 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "newrelic" -version = "8.10.0" +version = "9.1.1" description = "New Relic Python Agent" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ - {file = "newrelic-8.10.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:cf3b67327e64d2b50aec855821199b2bc46bc0c2d142df269d420748dd49b31b"}, - {file = "newrelic-8.10.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9601d886669fe1e0c23bbf91fb68ab23086011816ba96c6dd714c60dc0a74088"}, - {file = "newrelic-8.10.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:55a64d2abadf69bbc7bb01178332c4f25247689a97b01a62125d162ea7ec8974"}, - {file = "newrelic-8.10.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:b6cddd869ac8f7f32f6de8212ae878a21c9e63f2183601d239a76d38c5d5a366"}, - {file = "newrelic-8.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9af0130e1f1ca032c606d15a6d5558d27273a063b7c53702218b3beccd50b23"}, - {file = "newrelic-8.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2fd24b32dbf510e4e3fe40b71ad395dd73a4bb9f5eaf59eb5ff22ed76ba2d41"}, - {file = "newrelic-8.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2567ba9e29fd7b9f4c23cf16a5a149097eb0e5da587734c5a40732d75aaec189"}, - {file = "newrelic-8.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9c9f7842234a51e4a2fdafe42c42ebe0b6b1966279f2f91ec8a9c16480c2236"}, - {file = "newrelic-8.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:365d3b1a10d1021217beeb28a93c1356a9feb94bd24f02972691dc71227e40dc"}, - {file = 
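[Editor's note] The profiling tooling this commit pulls in (gprof2dot above, snakeviz further down, plus kcachegrind and pyprof2calltree in the dev container) targets the /tmp/tmp.prof file written by the new "Python: Current File with profiler" launcher. As a minimal, standard-library illustration (not part of the diff), that file can also be inspected directly with pstats:

    # Minimal pstats-based look at the profile produced by the new VS Code launcher.
    import pstats

    stats = pstats.Stats("/tmp/tmp.prof")  # path comes from the launch configuration
    stats.sort_stats("cumulative").print_stats(20)  # top 20 entries by cumulative time
    # For an interactive view, the dev tooling added in this commit can be used
    # instead, e.g. snakeviz, or pyprof2calltree plus kcachegrind.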
"newrelic-8.10.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd0666557419dbe11b04e3b38480b3113b3c4670d42619420d60352a1956dd8"}, - {file = "newrelic-8.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722072d57e2d416de68b650235878583a2a8809ea39c7dd5c8c11a19089b7665"}, - {file = "newrelic-8.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbda843100c99ac3291701c0a70fedb705c0b0707800c60b93657d3985aae357"}, - {file = "newrelic-8.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ed36fb91f152128825459eae9a52da364352ea95bcd78b405b0a5b8057b2ed7"}, - {file = "newrelic-8.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc975c29548e25805ead794d9de7ab3cb8ba4a6a106098646e1ab03112d1432e"}, - {file = "newrelic-8.10.0.tar.gz", hash = "sha256:8a2271b76ea684a63936302579d6085d46a2b54042cb91dc9b0d71a0cd4dd38b"}, + {file = "newrelic-9.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:639abcaa1baee5a1a137036e328617e3a6bbb33a63814ef6227a8059d9062f0d"}, + {file = "newrelic-9.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:1bd46363c71c3fc5dbcc0e96e1698757f4e7ff82c73a0ccb28f18c2b9f9c5de5"}, + {file = "newrelic-9.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:fbebaf8d9801eef85827ca2906a7bdbc2a458a226455da49c71857e2ce2b264a"}, + {file = "newrelic-9.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:c1a91322fdb301a7a17aab8df2e70925c1e2a01a61136d86a0e433f6ff167c5c"}, + {file = "newrelic-9.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d677f8f11bd69d92a4b359c53a04e02094e5198ed1aa49c9b1ca235522d5efd5"}, + {file = "newrelic-9.1.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:289cebbb86eb92b6c133e18cd5640115a918db41ae791d11c1ded5b592dd7c23"}, + {file = "newrelic-9.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1b93effe99be14a1747a70e9368274091abbed2ef3c593b08a29732d72d83e4"}, + {file = "newrelic-9.1.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45966c5f083d1f1abb76828c4864d20296f6929053fbe4d122e164d84c78e388"}, + {file = "newrelic-9.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e8e6af37cc084007913b991bced2ea2b8b31ed5318a397f210a9625c214459"}, + {file = "newrelic-9.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437dfae553012dbe6e0f288b1f600ef27b0ebc2c367641723529b1a71fa541ae"}, + {file = "newrelic-9.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5489becb7cff1ba992405c9d891fd1743e8042910c97f89ac072fd562501cdb1"}, + {file = "newrelic-9.1.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8403b48807eae524ee1b2da254f4222834f3aef8326dd4475f0b22f924774bde"}, + {file = "newrelic-9.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:603158cb23ed6604845e4fc8ee2a052e60003ccbb7da0f4b00667cd0c7c77c11"}, + {file = "newrelic-9.1.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96dedf6c1a385bf7b4b1336b3e4b5a0d6e983b5305022176c7c43d327e768bf3"}, 
+ {file = "newrelic-9.1.1.tar.gz", hash = "sha256:968a3662ccfb881498789105a52eec866b57c22b8cbdef43889ad76881c62a95"}, ] [package.extras] @@ -3904,6 +3915,20 @@ files = [ {file = "smartypants-2.0.1-py2.py3-none-any.whl", hash = "sha256:8db97f7cbdf08d15b158a86037cd9e116b4cf37703d24e0419a0d64ca5808f0d"}, ] +[[package]] +name = "snakeviz" +version = "2.2.0" +description = "A web-based viewer for Python profiler output" +optional = false +python-versions = ">=3.7" +files = [ + {file = "snakeviz-2.2.0-py2.py3-none-any.whl", hash = "sha256:569e2d71c47f80a886aa6e70d6405cb6d30aa3520969ad956b06f824c5f02b8e"}, + {file = "snakeviz-2.2.0.tar.gz", hash = "sha256:7bfd00be7ae147eb4a170a471578e1cd3f41f803238958b6b8efcf2c698a6aa9"}, +] + +[package.dependencies] +tornado = ">=2.0" + [[package]] name = "sqlalchemy" version = "1.4.52" @@ -4061,6 +4086,26 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tornado" +version = "6.4.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + [[package]] name = "tqdm" version = "4.66.5" @@ -4623,4 +4668,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "d6297cc2dcb00a6b90e6a5914a1f5357cce172e853e12f9d221375f276c516d3" +content-hash = "75a9f5035a4c3598836f34a7e9440aff6b4400ced2494a8e5e7625b50e906fef" diff --git a/pyproject.toml b/pyproject.toml index fc9abadfaa..35987c796e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ PyYAML = "6.0.1" cachelib = "0.12.0" SQLAlchemy = "1.4.52" -newrelic = "8.10.0" +newrelic = "9.1.1" notifications-python-client = "6.4.1" python-dotenv = "1.0.1" 
pwnedpasswords = "2.0.0" @@ -80,6 +80,7 @@ aws-xray-sdk = "2.14.0" [tool.poetry.group.test.dependencies] flake8 = "6.1.0" +gprof2dot = "2024.6.6" isort = "5.13.2" moto = "4.2.14" idna = "2.10" @@ -91,6 +92,7 @@ coveralls = "3.3.1" pytest-xdist = "2.5.0" freezegun = "1.5.1" requests-mock = "1.12.1" +snakeviz = "2.2.0" # optional requirements for jsonschema strict-rfc3339 = "0.7" rfc3987 = "1.3.8" From 193e9cc6f25618784b931d558757fc929efcfacd Mon Sep 17 00:00:00 2001 From: Jimmy Royer Date: Thu, 14 Nov 2024 16:03:19 -0500 Subject: [PATCH 06/10] Increasing the gunicorn graceful timeout at 35 seconds and hard timeout at 40 seconds (#2356) --- gunicorn_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gunicorn_config.py b/gunicorn_config.py index 814e477ea5..9c55167c74 100644 --- a/gunicorn_config.py +++ b/gunicorn_config.py @@ -43,8 +43,8 @@ # # Kubernetes config: # https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/ - graceful_timeout = 25 - timeout = 30 + graceful_timeout = 35 + timeout = 40 def on_starting(server): From 7ff60f66edf67040a706475f2071121bb40a45c3 Mon Sep 17 00:00:00 2001 From: Jimmy Royer Date: Thu, 14 Nov 2024 16:25:23 -0500 Subject: [PATCH 07/10] Increasing gunicorn timeout to 45s graceful / 50s hard (#2357) --- gunicorn_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gunicorn_config.py b/gunicorn_config.py index 9c55167c74..b289f6a6dd 100644 --- a/gunicorn_config.py +++ b/gunicorn_config.py @@ -43,8 +43,8 @@ # # Kubernetes config: # https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/ - graceful_timeout = 35 - timeout = 40 + graceful_timeout = 45 + timeout = 50 def on_starting(server): From 9174a5b8aa31fbb2235f74a5e043fc672e9c800d Mon Sep 17 00:00:00 2001 From: Jimmy Royer Date: Thu, 14 Nov 2024 17:12:51 -0500 Subject: [PATCH 08/10] Taking another approach: let's increase to a bigger value the gunicorn timeout for staging env (#2358) --- gunicorn_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gunicorn_config.py b/gunicorn_config.py index b289f6a6dd..8fe1b452ab 100644 --- a/gunicorn_config.py +++ b/gunicorn_config.py @@ -43,8 +43,8 @@ # # Kubernetes config: # https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/ - graceful_timeout = 45 - timeout = 50 + graceful_timeout = 85 + timeout = 90 def on_starting(server): From af6c98f47fbcbea4e2d83d55c337e36f9eb6acf1 Mon Sep 17 00:00:00 2001 From: Jimmy Royer Date: Fri, 15 Nov 2024 10:04:06 -0500 Subject: [PATCH 09/10] Upgraded to New Relic v9.2.0 (#2360) * Upgraded to New Relic v9.1.2 * Upgrade NewRelic to 9.2.0 to disable package reporting --- newrelic.ini | 11 ++++++++++- poetry.lock | 34 +++++++++++++++++----------------- pyproject.toml | 2 +- 3 files changed, 28 insertions(+), 19 deletions(-) diff --git a/newrelic.ini b/newrelic.ini index f8eb87bd36..f546e92b4f 100644 --- a/newrelic.ini +++ b/newrelic.ini @@ -188,7 +188,16 @@ error_collector.ignore_errors = app.v2.errors:BadRequestError jsonschema.excepti # specific environment will be used when the environment argument to the # newrelic.agent.initialize() function has been defined to be either # "development", "test", "staging" or "production". -# + +# If this setting is enabled, it will capture package and version +# information on startup of the agent that is displayed in the APM +# environment tab. 
+# In applications that have a large number of packages, having this +# setting enabled may cause a CPU spike as it captures all the package +# and version information. It is recommended in those cases to disable +# this setting. +# Disabling this setting will disable the ability to detect vulnerabilities in outdated packages. +package_reporting.enabled = false [newrelic:development] # monitor_mode = false diff --git a/poetry.lock b/poetry.lock index afe7624cec..0c5a5175ea 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2615,26 +2615,26 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "newrelic" -version = "9.1.1" +version = "9.2.0" description = "New Relic Python Agent" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ - {file = "newrelic-9.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:639abcaa1baee5a1a137036e328617e3a6bbb33a63814ef6227a8059d9062f0d"}, - {file = "newrelic-9.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:1bd46363c71c3fc5dbcc0e96e1698757f4e7ff82c73a0ccb28f18c2b9f9c5de5"}, - {file = "newrelic-9.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:fbebaf8d9801eef85827ca2906a7bdbc2a458a226455da49c71857e2ce2b264a"}, - {file = "newrelic-9.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:c1a91322fdb301a7a17aab8df2e70925c1e2a01a61136d86a0e433f6ff167c5c"}, - {file = "newrelic-9.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d677f8f11bd69d92a4b359c53a04e02094e5198ed1aa49c9b1ca235522d5efd5"}, - {file = "newrelic-9.1.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:289cebbb86eb92b6c133e18cd5640115a918db41ae791d11c1ded5b592dd7c23"}, - {file = "newrelic-9.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1b93effe99be14a1747a70e9368274091abbed2ef3c593b08a29732d72d83e4"}, - {file = "newrelic-9.1.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45966c5f083d1f1abb76828c4864d20296f6929053fbe4d122e164d84c78e388"}, - {file = "newrelic-9.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e8e6af37cc084007913b991bced2ea2b8b31ed5318a397f210a9625c214459"}, - {file = "newrelic-9.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437dfae553012dbe6e0f288b1f600ef27b0ebc2c367641723529b1a71fa541ae"}, - {file = "newrelic-9.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5489becb7cff1ba992405c9d891fd1743e8042910c97f89ac072fd562501cdb1"}, - {file = "newrelic-9.1.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8403b48807eae524ee1b2da254f4222834f3aef8326dd4475f0b22f924774bde"}, - {file = "newrelic-9.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:603158cb23ed6604845e4fc8ee2a052e60003ccbb7da0f4b00667cd0c7c77c11"}, - {file = "newrelic-9.1.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96dedf6c1a385bf7b4b1336b3e4b5a0d6e983b5305022176c7c43d327e768bf3"}, - {file = "newrelic-9.1.1.tar.gz", hash = "sha256:968a3662ccfb881498789105a52eec866b57c22b8cbdef43889ad76881c62a95"}, + {file = "newrelic-9.2.0-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:2b942eb9cfe8f62268e091d399d95a6762ef5fb90636839d61a30391efbcfbf0"}, + {file = "newrelic-9.2.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:56df42fc26400d8ee1e324bfff40439399149b15fbeb8ffd532a96e54576e69c"}, + {file = "newrelic-9.2.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:2cfa86a5c3388490335385e0c8c155ee1f06d738282721bd05a8c7ceed33fd92"}, + {file = "newrelic-9.2.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4139ef79e6abb7458edcf67f4ac0ce0b7dacbb58357f4d41716971fb15f778b6"}, + {file = "newrelic-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d36cf6ad3cf1df3989dba9c8a5dbc36150dd1852ffeccdf6b957f21a2d5869c0"}, + {file = "newrelic-9.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:420f07d0ea1bfde21507e6a59c5714f3a0451d01ab08f5ef79dd42b4552ef5ac"}, + {file = "newrelic-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1bddb5793117d91e130f2f93a86f5c3d9f29823329d35a38f8f64b738b3335e"}, + {file = "newrelic-9.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b1edfc48c3d483d990bab28f268d64914f76180e057424914bb29e5bcfa927"}, + {file = "newrelic-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f6c5065c434f8fb65e0e5a96a0e385be9baf5625e024cb9c87f345d966a9e5f"}, + {file = "newrelic-9.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b151f1830590cebb53ddf45462101a0ef59d91e28a46a1a652626dc0d4c0148d"}, + {file = "newrelic-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9db7dc6541a519acb327f9409c96f42faefb60748f0827c1dacce7613f88864"}, + {file = "newrelic-9.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2a69115893ec53b1815b1041e231265abcf60d7d4c07ccc335b6146459e07ed"}, + {file = "newrelic-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2911fb601b2a66eb0ab328342e0253889d94102c0b823f9dc5f6124917f9fbac"}, + {file = "newrelic-9.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88c799cb29e6b9c20fed50709edea6bc2b05fc5e40d0fd4d3fe2a37ae4352043"}, + {file = "newrelic-9.2.0.tar.gz", hash = "sha256:32395e1c6a97cc96427abaf4b274ccf00709613a68d18b51fc2c0711cda89bc7"}, ] [package.extras] @@ -4668,4 +4668,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "75a9f5035a4c3598836f34a7e9440aff6b4400ced2494a8e5e7625b50e906fef" +content-hash = "94c7d45c58e7e8f851f98368ff8dc835f860014f265611227a09e6174c0043c3" diff --git a/pyproject.toml b/pyproject.toml index 35987c796e..a761560475 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ PyYAML = "6.0.1" cachelib = "0.12.0" SQLAlchemy = "1.4.52" -newrelic = "9.1.1" +newrelic = "9.2.0" notifications-python-client = "6.4.1" python-dotenv = "1.0.1" pwnedpasswords = "2.0.0" From 56da4f90c3d629c22bc6b34872de49bcf4cb9aa3 Mon Sep 17 00:00:00 2001 From: Jimmy Royer Date: Fri, 15 Nov 2024 11:40:30 -0500 Subject: [PATCH 10/10] Enable package reporting to monitor perf improvements (#2361) --- newrelic.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/newrelic.ini b/newrelic.ini index f546e92b4f..5ef17946aa 100644 --- a/newrelic.ini +++ 
b/newrelic.ini @@ -202,11 +202,13 @@ package_reporting.enabled = false [newrelic:development] # monitor_mode = false log_level = debug +package_reporting.enabled = true [newrelic:staging] # app_name = Python Application (Staging) # monitor_mode = true log_level = debug +package_reporting.enabled = true [newrelic:production] # monitor_mode = true @@ -217,5 +219,6 @@ log_level = debug [newrelic:dev] # monitor_mode = false log_level = debug +package_reporting.enabled = true # ---------------------------------------------------------------------------
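[Editor's note] Taken together with the earlier change that passes environment= to newrelic.agent.initialize(), these per-environment sections are what let package_reporting.enabled and log_level differ between dev, staging and production without touching the shared [newrelic] block. A minimal sketch of the call that selects a section (the default environment name below is an assumption for illustration only):

    # Sketch only: the environment name picks the matching [newrelic:<name>]
    # section, so the overrides added above take effect per environment.
    import os

    import newrelic.agent

    newrelic.agent.initialize(
        "newrelic.ini",
        environment=os.getenv("NOTIFY_ENVIRONMENT", "development"),  # default assumed
    )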