From 0d5cce49cc8c3ff3f2b35ec5bef417bac75fa1ae Mon Sep 17 00:00:00 2001
From: Jumana B
Date: Tue, 6 Feb 2024 12:05:29 -0500
Subject: [PATCH 01/33] Task/add index 2 (#2106)

* Add index concurrently for n_hist

* fix

---------

Co-authored-by: William B <7444334+whabanks@users.noreply.github.com>
---
 .../versions/0444_add_index_n_history2.py | 40 +++++++++++++++++++
 1 file changed, 40 insertions(+)
 create mode 100644 migrations/versions/0444_add_index_n_history2.py

diff --git a/migrations/versions/0444_add_index_n_history2.py b/migrations/versions/0444_add_index_n_history2.py
new file mode 100644
index 0000000000..2b3604d984
--- /dev/null
+++ b/migrations/versions/0444_add_index_n_history2.py
@@ -0,0 +1,40 @@
+"""
+
+Revision ID: 0444_add_index_n_history2
+Revises: 0443_add_apikey_last_used_column
+Create Date: 2024-02-06 00:00:00
+
+"""
+from datetime import datetime
+
+from alembic import op
+
+revision = "0444_add_index_n_history2"
+down_revision = "0443_add_apikey_last_used_column"
+
+
+def index_exists(name):
+    connection = op.get_bind()
+    result = connection.execute(
+        "SELECT exists(SELECT 1 from pg_indexes where indexname = '{}') as ix_exists;".format(name)
+    ).first()
+    return result.ix_exists
+
+
+# CREATE INDEX CONCURRENTLY cannot run inside a transaction, hence the COMMIT below
+def upgrade():
+    op.execute("COMMIT")
+    if not index_exists("ix_notification_history_api_key_id_created"):
+        op.create_index(
+            op.f("ix_notification_history_api_key_id_created"),
+            "notification_history",
+            ["api_key_id", "created_at"],
+            postgresql_concurrently=True,
+        )
+
+
+def downgrade():
+    op.execute("COMMIT")
+    op.drop_index(
+        op.f("ix_notification_history_api_key_id_created"), table_name="notification_history", postgresql_concurrently=True
+    )

From 5aeb39c9a411b81e80f6515f7337b8f59fdaa7bb Mon Sep 17 00:00:00 2001
From: Jumana B
Date: Tue, 6 Feb 2024 15:42:19 -0500
Subject: [PATCH 02/33] Remove notification fallback (#2107)

* Remove notification fallback

* fix
---
 app/dao/fact_notification_status_dao.py             |  9 +--------
 tests/app/api_key/test_rest.py                      |  9 ---------
 tests/app/dao/test_fact_notification_status_dao.py  | 12 ------------
 3 files changed, 1 insertion(+), 29 deletions(-)

diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py
index 86c2404708..533ee274b3 100644
--- a/app/dao/fact_notification_status_dao.py
+++ b/app/dao/fact_notification_status_dao.py
@@ -356,14 +356,7 @@ def get_last_send_for_api_key(api_key_id):
     api_key_table = (
         db.session.query(ApiKey.last_used_timestamp.label("last_notification_created")).filter(ApiKey.id == api_key_id).all()
     )
-    if not api_key_table[0][0]:
-        notification_table = (
-            db.session.query(func.max(Notification.created_at).label("last_notification_created"))
-            .filter(Notification.api_key_id == api_key_id)
-            .all()
-        )
-        return [] if notification_table[0][0] is None else notification_table
-    return api_key_table
+    return [] if api_key_table[0][0] is None else api_key_table


 def get_api_key_ranked_by_notifications_created(n_days_back):
diff --git a/tests/app/api_key/test_rest.py b/tests/app/api_key/test_rest.py
index 0def4b7884..a28985884a 100644
--- a/tests/app/api_key/test_rest.py
+++ b/tests/app/api_key/test_rest.py
@@ -1,9 +1,6 @@
-from datetime import datetime
-
 import pytest
 from flask import url_for

-from app import DATETIME_FORMAT
 from app.dao.api_key_dao import get_api_key_by_secret, get_unsigned_secret
 from app.models import KEY_TYPE_NORMAL
 from tests import create_sre_authorization_header
@@ -33,12 +30,6 @@ def test_get_api_key_stats_with_sends(admin_request, notify_db, notify_db_sessio
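
The migration in PATCH 01 works around a PostgreSQL restriction: CREATE INDEX CONCURRENTLY cannot run inside a transaction block, so upgrade() first issues COMMIT to end the transaction Alembic opens, and it guards with index_exists() so a retried deploy does not fail on an index that already exists. Below is a minimal sketch of the same pattern with illustrative index and table names, and with the catalog lookup switched to a bound parameter rather than str.format():

    from alembic import op
    from sqlalchemy import text

    def index_exists(name):
        connection = op.get_bind()
        # Bound parameter instead of interpolating the name into the SQL string.
        row = connection.execute(
            text("SELECT EXISTS(SELECT 1 FROM pg_indexes WHERE indexname = :name) AS ix_exists"),
            {"name": name},
        ).first()
        return row.ix_exists

    def upgrade():
        # End Alembic's transaction: CREATE INDEX CONCURRENTLY is not allowed inside one.
        op.execute("COMMIT")
        if not index_exists("ix_example_created_at"):  # idempotent across retried deploys
            op.create_index(
                "ix_example_created_at",
                "example_table",
                ["created_at"],
                postgresql_concurrently=True,
            )
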
assert api_key_stats["sms_sends"] == 0 assert api_key_stats["total_sends"] == total_sends - # the following lines test that a send has occurred within the last second - last_send_dt = datetime.strptime(api_key_stats["last_send"], DATETIME_FORMAT) - now = datetime.utcnow() - time_delta = now - last_send_dt - assert abs(time_delta.total_seconds()) < 1 - def test_get_api_key_stats_no_sends(admin_request, notify_db, notify_db_session): service = create_service(service_name="Service 2") diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 85cd078d91..e19d5adbae 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -367,21 +367,9 @@ def test_get_last_send_for_api_key_check_last_used(notify_db_session): def test_get_last_send_for_api_key(notify_db_session): service = create_service(service_name="First Service") api_key = create_api_key(service) - template_email = create_template(service=service, template_type=EMAIL_TYPE) - total_sends = 10 - last_send = get_last_send_for_api_key(str(api_key.id)) assert last_send == [] - for x in range(total_sends): - save_notification(create_notification(template=template_email, api_key=api_key)) - - # the following lines test that a send has occurred within the last second - last_send = get_last_send_for_api_key(str(api_key.id))[0][0] - now = datetime.utcnow() - time_delta = now - last_send - assert abs(time_delta.total_seconds()) < 1 - def test_get_api_key_ranked_by_notifications_created(notify_db_session): service = create_service(service_name="Service 1") From 2e26af0c09ae8ebfefc65da69760191ef4d2acac Mon Sep 17 00:00:00 2001 From: "sre-read-write[bot]" <92993749+sre-read-write[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 15:44:09 -0500 Subject: [PATCH 03/33] chore: synced local '.github/workflows/ossf-scorecard.yml' with remote 'tools/sre_file_sync/ossf-scorecard.yml' (#2111) Co-authored-by: sre-read-write[bot] <92993749+sre-read-write[bot]@users.noreply.github.com> --- .github/workflows/ossf-scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml index 69eba84060..f612283a7c 100644 --- a/.github/workflows/ossf-scorecard.yml +++ b/.github/workflows/ossf-scorecard.yml @@ -25,7 +25,7 @@ jobs: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@155cf0ea68b491a7c47af606d2741b54963ecb04 + uses: ossf/scorecard-action@0ae0fb3a2ca18a43d6dea9c07cfb9bd01d17eae1 with: results_file: ossf-results.json results_format: json From 5bcdb72ee1371e815819221afce6976e77550101 Mon Sep 17 00:00:00 2001 From: Jumana B Date: Tue, 20 Feb 2024 09:33:19 -0500 Subject: [PATCH 04/33] Task: Filter Heartbeats (#2108) * Add filter for heartbeat template * fix formatting * Edit and add a test * test for rest endpoint * Add filter heartbeats for live service data --- app/dao/fact_notification_status_dao.py | 15 ++++- app/dao/services_dao.py | 13 ++++- app/service/rest.py | 6 +- .../dao/test_fact_notification_status_dao.py | 57 +++++++++++++++++++ tests/app/dao/test_services_dao.py | 12 +++- tests/app/service/test_rest.py | 15 +++++ 6 files changed, 108 insertions(+), 10 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 533ee274b3..bdee6ac67c 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -149,8 +149,8 @@ 
def fetch_notification_status_for_service_by_month(start_date, end_date, service ) -def fetch_delivered_notification_stats_by_month(): - return ( +def fetch_delivered_notification_stats_by_month(filter_heartbeats=None): + query = ( db.session.query( func.date_trunc("month", FactNotificationStatus.bst_date).cast(db.Text).label("month"), FactNotificationStatus.notification_type, @@ -169,8 +169,17 @@ def fetch_delivered_notification_stats_by_month(): func.date_trunc("month", FactNotificationStatus.bst_date).desc(), FactNotificationStatus.notification_type, ) - .all() ) + if filter_heartbeats: + query = query.filter( + FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_LOW"], + FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_MEDIUM"], + FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_HIGH"], + FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_SMS_LOW"], + FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_SMS_MEDIUM"], + FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_SMS_HIGH"], + ) + return query.all() def fetch_notification_stats_for_trial_services(): diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 80d1f25c35..6055e9ff9e 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -83,7 +83,7 @@ def dao_count_live_services(): ).count() -def dao_fetch_live_services_data(): +def dao_fetch_live_services_data(filter_heartbeats=None): year_start_date, year_end_date = get_current_financial_year() most_recent_annual_billing = ( @@ -175,8 +175,17 @@ def dao_fetch_live_services_data(): AnnualBilling.free_sms_fragment_limit, ) .order_by(asc(Service.go_live_at)) - .all() ) + if filter_heartbeats: + data = data.join(Template, Service.id == Template.service_id).filter( + Template.id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_LOW"], + Template.id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_MEDIUM"], + Template.id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_HIGH"], + Template.id != current_app.config["HEARTBEAT_TEMPLATE_SMS_LOW"], + Template.id != current_app.config["HEARTBEAT_TEMPLATE_SMS_MEDIUM"], + Template.id != current_app.config["HEARTBEAT_TEMPLATE_SMS_HIGH"], + ) + data = data.all() results = [] for row in data: existing_service = next((x for x in results if x["service_id"] == row.service_id), None) diff --git a/app/service/rest.py b/app/service/rest.py index 8c198b0151..8ecf13f47d 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -209,13 +209,15 @@ def find_services_by_name(): @service_blueprint.route("/live-services-data", methods=["GET"]) def get_live_services_data(): - data = dao_fetch_live_services_data() + filter_heartbeats = request.args.get("filter_heartbeats", None) == "True" + data = dao_fetch_live_services_data(filter_heartbeats=filter_heartbeats) return jsonify(data=data) @service_blueprint.route("/delivered-notifications-stats-by-month-data", methods=["GET"]) def get_delivered_notification_stats_by_month_data(): - return jsonify(data=fetch_delivered_notification_stats_by_month()) + filter_heartbeats = request.args.get("filter_heartbeats", None) == "True" + return jsonify(data=fetch_delivered_notification_stats_by_month(filter_heartbeats=filter_heartbeats)) @service_blueprint.route("/", methods=["GET"]) diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index e19d5adbae..ce26830584 
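
Both DAO changes in PATCH 04 follow the same SQLAlchemy idiom: keep the statement as a Query object, attach filters conditionally, and only call .all() at the very end, when the SQL is actually emitted. A self-contained sketch of that idiom under toy names (in-memory SQLite and a stand-in model, not the project's real tables):

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Stat(Base):
        __tablename__ = "stat"
        id = Column(Integer, primary_key=True)
        template_id = Column(String)
        count = Column(Integer)

    def fetch_stats(session, exclude_template_ids=()):
        query = session.query(Stat)
        for template_id in exclude_template_ids:
            # Each filter() call ANDs another predicate onto the same query.
            query = query.filter(Stat.template_id != template_id)
        return query.order_by(Stat.id).all()  # SQL is only emitted here

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add_all([Stat(template_id="heartbeat", count=5), Stat(template_id="real", count=2)])
        session.commit()
        assert [s.template_id for s in fetch_stats(session, ("heartbeat",))] == ["real"]
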
100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -51,6 +51,7 @@ create_template, save_notification, ) +from tests.conftest import set_config def test_update_fact_notification_status(notify_db_session): @@ -728,6 +729,62 @@ def test_fetch_delivered_notification_stats_by_month(sample_service): assert results[3].count == 6 +@freeze_time("2020-11-02 14:00") +def test_fetch_delivered_notification_stats_by_month_filter_heartbeats(notify_api, sample_service): + sms_template = create_template(service=sample_service, template_type="sms", template_name="a") + email_template = create_template(service=sample_service, template_type="email", template_name="b") + + # Not counted: before GC Notify started + create_ft_notification_status( + utc_date=date(2019, 10, 10), + service=sample_service, + template=email_template, + count=3, + ) + + create_ft_notification_status( + utc_date=date(2019, 12, 10), + service=sample_service, + template=email_template, + count=3, + ) + + create_ft_notification_status( + utc_date=date(2019, 12, 5), + service=sample_service, + template=sms_template, + notification_status=NOTIFICATION_DELIVERED, + count=6, + ) + + create_ft_notification_status( + utc_date=date(2020, 1, 1), + service=sample_service, + template=sms_template, + notification_status=NOTIFICATION_SENT, + count=4, + ) + + # Not counted: failed notifications + create_ft_notification_status( + utc_date=date(2020, 1, 1), + service=sample_service, + template=sms_template, + notification_status=NOTIFICATION_FAILED, + count=10, + ) + + create_ft_notification_status( + utc_date=date(2020, 3, 1), + service=sample_service, + template=email_template, + count=5, + ) + with set_config(notify_api, "HEARTBEAT_TEMPLATE_EMAIL_LOW", email_template.id): + results = fetch_delivered_notification_stats_by_month(filter_heartbeats=True) + assert len(results) == 2 + + def test_fetch_delivered_notification_stats_by_month_empty(): assert fetch_delivered_notification_stats_by_month() == [] diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index 4a81ea4f86..cb5caa8c3b 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -87,6 +87,7 @@ create_user, save_notification, ) +from tests.conftest import set_config # from unittest import mock @@ -493,7 +494,8 @@ def test_get_all_user_services_should_return_empty_list_if_no_services_for_user( @freeze_time("2019-04-23T10:00:00") -def test_dao_fetch_live_services_data(sample_user): +@pytest.mark.parametrize("filter_heartbeats", [True, False]) +def test_dao_fetch_live_services_data_filter_heartbeats(notify_api, sample_user, filter_heartbeats): org = create_organisation(organisation_type="nhs_central") service = create_service(go_live_user=sample_user, go_live_at="2014-04-20T10:00:00") template = create_template(service=service) @@ -561,8 +563,12 @@ def test_dao_fetch_live_services_data(sample_user): # 3rd service: billing from 2019 create_annual_billing(service_3.id, 200, 2019) - results = dao_fetch_live_services_data() - assert len(results) == 3 + with set_config(notify_api, "HEARTBEAT_TEMPLATE_EMAIL_LOW", template.id): + results = dao_fetch_live_services_data(filter_heartbeats=filter_heartbeats) + if not filter_heartbeats: + assert len(results) == 3 + else: + assert len(results) == 2 # checks the results and that they are ordered by date: # @todo: this test is temporarily forced to pass until we can add the fiscal year back into # the query and create a new 
endpoint for the homepage stats diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index e3e309cda5..c15b2dec6b 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -70,6 +70,7 @@ create_user, save_notification, ) +from tests.conftest import set_config def test_get_service_list(client, service_factory): @@ -253,6 +254,20 @@ def test_get_delivered_notification_stats_by_month_data(admin_request, sample_se assert first["count"] == 3 +def test_get_delivered_notification_stats_by_month_data_without_heartbeat(notify_api, admin_request, sample_service): + email_template = create_template(service=sample_service, template_type="email", template_name="b") + + create_ft_notification_status( + utc_date=date(2019, 12, 10), + service=sample_service, + template=email_template, + count=3, + ) + with set_config(notify_api, "HEARTBEAT_TEMPLATE_EMAIL_LOW", email_template.id): + response = admin_request.get("service.get_delivered_notification_stats_by_month_data", filter_heartbeats=True)["data"] + assert len(response) == 0 + + def test_get_service_by_id(admin_request, sample_service): json_resp = admin_request.get("service.get_service_by_id", service_id=sample_service.id) assert json_resp["data"]["name"] == sample_service.name From 750c21a4174d3349b414395aaa4fbfda7db93103 Mon Sep 17 00:00:00 2001 From: William B <7444334+whabanks@users.noreply.github.com> Date: Wed, 21 Feb 2024 13:45:23 -0400 Subject: [PATCH 05/33] Revert "Task: Filter Heartbeats (#2108)" (#2117) This reverts commit 5bcdb72ee1371e815819221afce6976e77550101. --- app/dao/fact_notification_status_dao.py | 15 +---- app/dao/services_dao.py | 13 +---- app/service/rest.py | 6 +- .../dao/test_fact_notification_status_dao.py | 57 ------------------- tests/app/dao/test_services_dao.py | 12 +--- tests/app/service/test_rest.py | 15 ----- 6 files changed, 10 insertions(+), 108 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index bdee6ac67c..533ee274b3 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -149,8 +149,8 @@ def fetch_notification_status_for_service_by_month(start_date, end_date, service ) -def fetch_delivered_notification_stats_by_month(filter_heartbeats=None): - query = ( +def fetch_delivered_notification_stats_by_month(): + return ( db.session.query( func.date_trunc("month", FactNotificationStatus.bst_date).cast(db.Text).label("month"), FactNotificationStatus.notification_type, @@ -169,17 +169,8 @@ def fetch_delivered_notification_stats_by_month(filter_heartbeats=None): func.date_trunc("month", FactNotificationStatus.bst_date).desc(), FactNotificationStatus.notification_type, ) + .all() ) - if filter_heartbeats: - query = query.filter( - FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_LOW"], - FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_MEDIUM"], - FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_HIGH"], - FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_SMS_LOW"], - FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_SMS_MEDIUM"], - FactNotificationStatus.template_id != current_app.config["HEARTBEAT_TEMPLATE_SMS_HIGH"], - ) - return query.all() def fetch_notification_stats_for_trial_services(): diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 6055e9ff9e..80d1f25c35 100644 --- 
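
The tests added in PATCH 04 (and removed again by the revert in PATCH 05) rely on tests.conftest.set_config to point a HEARTBEAT_TEMPLATE_* config key at a template created inside the test. The fixture itself is not shown in this series; a minimal sketch of what such a helper typically looks like, assuming the notify_api fixture exposes a Flask app object:

    from contextlib import contextmanager

    @contextmanager
    def set_config(app, name, value):
        # Temporarily override a single Flask config key, restoring the old value on exit.
        old_value = app.config.get(name)
        app.config[name] = value
        try:
            yield
        finally:
            app.config[name] = old_value
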
a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -83,7 +83,7 @@ def dao_count_live_services(): ).count() -def dao_fetch_live_services_data(filter_heartbeats=None): +def dao_fetch_live_services_data(): year_start_date, year_end_date = get_current_financial_year() most_recent_annual_billing = ( @@ -175,17 +175,8 @@ def dao_fetch_live_services_data(filter_heartbeats=None): AnnualBilling.free_sms_fragment_limit, ) .order_by(asc(Service.go_live_at)) + .all() ) - if filter_heartbeats: - data = data.join(Template, Service.id == Template.service_id).filter( - Template.id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_LOW"], - Template.id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_MEDIUM"], - Template.id != current_app.config["HEARTBEAT_TEMPLATE_EMAIL_HIGH"], - Template.id != current_app.config["HEARTBEAT_TEMPLATE_SMS_LOW"], - Template.id != current_app.config["HEARTBEAT_TEMPLATE_SMS_MEDIUM"], - Template.id != current_app.config["HEARTBEAT_TEMPLATE_SMS_HIGH"], - ) - data = data.all() results = [] for row in data: existing_service = next((x for x in results if x["service_id"] == row.service_id), None) diff --git a/app/service/rest.py b/app/service/rest.py index 8ecf13f47d..8c198b0151 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -209,15 +209,13 @@ def find_services_by_name(): @service_blueprint.route("/live-services-data", methods=["GET"]) def get_live_services_data(): - filter_heartbeats = request.args.get("filter_heartbeats", None) == "True" - data = dao_fetch_live_services_data(filter_heartbeats=filter_heartbeats) + data = dao_fetch_live_services_data() return jsonify(data=data) @service_blueprint.route("/delivered-notifications-stats-by-month-data", methods=["GET"]) def get_delivered_notification_stats_by_month_data(): - filter_heartbeats = request.args.get("filter_heartbeats", None) == "True" - return jsonify(data=fetch_delivered_notification_stats_by_month(filter_heartbeats=filter_heartbeats)) + return jsonify(data=fetch_delivered_notification_stats_by_month()) @service_blueprint.route("/", methods=["GET"]) diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index ce26830584..e19d5adbae 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -51,7 +51,6 @@ create_template, save_notification, ) -from tests.conftest import set_config def test_update_fact_notification_status(notify_db_session): @@ -729,62 +728,6 @@ def test_fetch_delivered_notification_stats_by_month(sample_service): assert results[3].count == 6 -@freeze_time("2020-11-02 14:00") -def test_fetch_delivered_notification_stats_by_month_filter_heartbeats(notify_api, sample_service): - sms_template = create_template(service=sample_service, template_type="sms", template_name="a") - email_template = create_template(service=sample_service, template_type="email", template_name="b") - - # Not counted: before GC Notify started - create_ft_notification_status( - utc_date=date(2019, 10, 10), - service=sample_service, - template=email_template, - count=3, - ) - - create_ft_notification_status( - utc_date=date(2019, 12, 10), - service=sample_service, - template=email_template, - count=3, - ) - - create_ft_notification_status( - utc_date=date(2019, 12, 5), - service=sample_service, - template=sms_template, - notification_status=NOTIFICATION_DELIVERED, - count=6, - ) - - create_ft_notification_status( - utc_date=date(2020, 1, 1), - service=sample_service, - template=sms_template, - 
notification_status=NOTIFICATION_SENT, - count=4, - ) - - # Not counted: failed notifications - create_ft_notification_status( - utc_date=date(2020, 1, 1), - service=sample_service, - template=sms_template, - notification_status=NOTIFICATION_FAILED, - count=10, - ) - - create_ft_notification_status( - utc_date=date(2020, 3, 1), - service=sample_service, - template=email_template, - count=5, - ) - with set_config(notify_api, "HEARTBEAT_TEMPLATE_EMAIL_LOW", email_template.id): - results = fetch_delivered_notification_stats_by_month(filter_heartbeats=True) - assert len(results) == 2 - - def test_fetch_delivered_notification_stats_by_month_empty(): assert fetch_delivered_notification_stats_by_month() == [] diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index cb5caa8c3b..4a81ea4f86 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -87,7 +87,6 @@ create_user, save_notification, ) -from tests.conftest import set_config # from unittest import mock @@ -494,8 +493,7 @@ def test_get_all_user_services_should_return_empty_list_if_no_services_for_user( @freeze_time("2019-04-23T10:00:00") -@pytest.mark.parametrize("filter_heartbeats", [True, False]) -def test_dao_fetch_live_services_data_filter_heartbeats(notify_api, sample_user, filter_heartbeats): +def test_dao_fetch_live_services_data(sample_user): org = create_organisation(organisation_type="nhs_central") service = create_service(go_live_user=sample_user, go_live_at="2014-04-20T10:00:00") template = create_template(service=service) @@ -563,12 +561,8 @@ def test_dao_fetch_live_services_data_filter_heartbeats(notify_api, sample_user, # 3rd service: billing from 2019 create_annual_billing(service_3.id, 200, 2019) - with set_config(notify_api, "HEARTBEAT_TEMPLATE_EMAIL_LOW", template.id): - results = dao_fetch_live_services_data(filter_heartbeats=filter_heartbeats) - if not filter_heartbeats: - assert len(results) == 3 - else: - assert len(results) == 2 + results = dao_fetch_live_services_data() + assert len(results) == 3 # checks the results and that they are ordered by date: # @todo: this test is temporarily forced to pass until we can add the fiscal year back into # the query and create a new endpoint for the homepage stats diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index c15b2dec6b..e3e309cda5 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -70,7 +70,6 @@ create_user, save_notification, ) -from tests.conftest import set_config def test_get_service_list(client, service_factory): @@ -254,20 +253,6 @@ def test_get_delivered_notification_stats_by_month_data(admin_request, sample_se assert first["count"] == 3 -def test_get_delivered_notification_stats_by_month_data_without_heartbeat(notify_api, admin_request, sample_service): - email_template = create_template(service=sample_service, template_type="email", template_name="b") - - create_ft_notification_status( - utc_date=date(2019, 12, 10), - service=sample_service, - template=email_template, - count=3, - ) - with set_config(notify_api, "HEARTBEAT_TEMPLATE_EMAIL_LOW", email_template.id): - response = admin_request.get("service.get_delivered_notification_stats_by_month_data", filter_heartbeats=True)["data"] - assert len(response) == 0 - - def test_get_service_by_id(admin_request, sample_service): json_resp = admin_request.get("service.get_service_by_id", service_id=sample_service.id) assert json_resp["data"]["name"] == sample_service.name From 
098df03aeae7335115db88e9941103c6622c7030 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Thu, 22 Feb 2024 10:06:36 -0500 Subject: [PATCH 06/33] upgrade poetry and cryptography (#2115) Co-authored-by: William B <7444334+whabanks@users.noreply.github.com> --- .devcontainer/Dockerfile | 2 +- .github/workflows/test.yaml | 4 +- Makefile | 2 +- ci/Dockerfile | 3 +- ci/Dockerfile.lambda | 2 +- ci/Dockerfile.test | 4 +- local/Dockerfile | 2 +- poetry.lock | 254 +++++++++++++++++++++++++++++++----- pyproject.toml | 3 +- tests-perf/ops/Dockerfile | 4 +- 10 files changed, 230 insertions(+), 50 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index c6a2f6b839..49963dcbfa 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -2,7 +2,7 @@ FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.10@sha256:ef9cc483a593c95 ARG KUBENS_VERSION="0.9.4" ARG OCTANT_VERSION="0.25.1" -ENV POETRY_VERSION="1.3.2" +ENV POETRY_VERSION="1.7.1" # Install packages RUN apt-get update \ diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index e74ffd0f67..3169d77d64 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -33,10 +33,10 @@ jobs: ${{ runner.os }}-pip- - name: Install poetry env: - POETRY_VERSION: "1.3.2" + POETRY_VERSION: "1.7.1" run: pip install poetry==${POETRY_VERSION} && poetry --version - name: Check poetry.lock aligns with pyproject.toml - run: poetry lock --check + run: poetry check --lock - name: Install requirements run: poetry install --with test - name: Run tests diff --git a/Makefile b/Makefile index 0b1064d543..3f98e79566 100644 --- a/Makefile +++ b/Makefile @@ -25,7 +25,7 @@ freeze-requirements: .PHONY: test-requirements test-requirements: - poetry lock --check + poetry check --lock .PHONY: coverage coverage: venv ## Create coverage report diff --git a/ci/Dockerfile b/ci/Dockerfile index 3b6c11f11f..144e6bb07f 100644 --- a/ci/Dockerfile +++ b/ci/Dockerfile @@ -1,10 +1,9 @@ FROM python:3.10-alpine3.16@sha256:afe68972cc00883d70b3760ee0ffbb7375cf09706c122dda7063ffe64c5be21b ENV PYTHONDONTWRITEBYTECODE 1 -ENV POETRY_VERSION "1.3.2" ENV APP_VENV="/app/.venv" ENV POETRY_HOME="/opt/poetry" -ENV POETRY_VERSION="1.3.2" +ENV POETRY_VERSION="1.7.1" ENV POETRY_VIRTUALENVS_CREATE="false" ENV PATH="${APP_VENV}/bin:${POETRY_HOME}/bin:$PATH" diff --git a/ci/Dockerfile.lambda b/ci/Dockerfile.lambda index bf18db168a..c827b08c24 100644 --- a/ci/Dockerfile.lambda +++ b/ci/Dockerfile.lambda @@ -5,7 +5,7 @@ ENV PYTHONDONTWRITEBYTECODE 1 ENV TASK_ROOT /app ENV APP_VENV="${TASK_ROOT}/.venv" ENV POETRY_HOME="/opt/poetry" -ENV POETRY_VERSION="1.3.2" +ENV POETRY_VERSION="1.7.1" ENV POETRY_VIRTUALENVS_CREATE="false" ENV PATH="${APP_VENV}/bin:${POETRY_HOME}/bin:$PATH" diff --git a/ci/Dockerfile.test b/ci/Dockerfile.test index ccdc2d7208..e068dfbfd5 100644 --- a/ci/Dockerfile.test +++ b/ci/Dockerfile.test @@ -3,10 +3,10 @@ FROM python:3.10-alpine@sha256:860f632e67178d9e90c7dfa9844a5e02098220bff5716d3c2fe1870325f00853 ENV PYTHONDONTWRITEBYTECODE 1 -ENV POETRY_VERSION "1.3.2" +ENV POETRY_VERSION "1.7.1" ARG APP_VENV="/app/.venv" ARG POETRY_HOME="/opt/poetry" -ARG POETRY_VERSION="1.3.2" +ARG POETRY_VERSION="1.7.1" ARG POETRY_VIRTUALENVS_CREATE="false" ENV PATH="${APP_VENV}/bin:${POETRY_HOME}/bin:$PATH" diff --git a/local/Dockerfile b/local/Dockerfile index 11444b22cf..f4ea41376c 100644 --- a/local/Dockerfile +++ b/local/Dockerfile @@ -1,7 +1,7 @@ FROM python:3.10-alpine@sha256:860f632e67178d9e90c7dfa9844a5e02098220bff5716d3c2fe1870325f00853 
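
# Context for the version bumps in PATCH 06: the Makefile and test workflow
# switch from "poetry lock --check" to "poetry check --lock" because newer
# Poetry releases (1.6+) deprecate the former in favour of the latter, so the
# command change has to land together with the POETRY_VERSION pin of 1.7.1.
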
ENV PYTHONDONTWRITEBYTECODE 1 -ENV POETRY_VERSION "1.3.2" +ENV POETRY_VERSION "1.7.1" RUN apk add --no-cache bash build-base git gcc musl-dev postgresql-dev g++ make libffi-dev libmagic libcurl curl-dev && rm -rf /var/cache/apk/* diff --git a/poetry.lock b/poetry.lock index 7014e7e5fd..5482ec4aa3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. [[package]] name = "aiohttp" version = "3.9.1" description = "Async http client/server framework (asyncio)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -100,6 +101,7 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -114,6 +116,7 @@ frozenlist = ">=1.1.0" name = "alembic" version = "1.12.1" description = "A database migration tool for SQLAlchemy." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -133,6 +136,7 @@ tz = ["python-dateutil"] name = "amqp" version = "5.2.0" description = "Low-level AMQP client for Python (fork of amqplib)." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -147,6 +151,7 @@ vine = ">=5.0.0,<6.0.0" name = "annotated-types" version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -158,6 +163,7 @@ files = [ name = "apig-wsgi" version = "2.18.0" description = "Wrap a WSGI application in an AWS Lambda handler function for running on API Gateway or an ALB." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -169,6 +175,7 @@ files = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -180,6 +187,7 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -198,6 +206,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "aws-embedded-metrics" version = "1.0.8" description = "AWS Embedded Metrics Package" +category = "main" optional = false python-versions = "*" files = [ @@ -212,6 +221,7 @@ aiohttp = "*" name = "awscli" version = "1.32.25" description = "Universal Command Line Environment for AWS." +category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -231,6 +241,7 @@ s3transfer = ">=0.10.0,<0.11.0" name = "awscli-cwlogs" version = "1.4.6" description = "AWSCLI CloudWatch Logs plugin" +category = "main" optional = false python-versions = "*" files = [ @@ -247,6 +258,7 @@ six = ">=1.1.0" name = "bcrypt" version = "4.1.1" description = "Modern password hashing for your software and your servers" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -277,6 +289,7 @@ typecheck = ["mypy"] name = "billiard" version = "4.2.0" description = "Python multiprocessing fork with improvements and bugfixes" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -288,6 +301,7 @@ files = [ name = "black" version = "23.7.0" description = "The uncompromising code formatter." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -333,6 +347,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -351,6 +366,7 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "blinker" version = "1.7.0" description = "Fast, simple object-to-object and broadcast signaling" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -362,6 +378,7 @@ files = [ name = "boto" version = "2.49.0" description = "Amazon Web Services Library" +category = "main" optional = false python-versions = "*" files = [ @@ -373,6 +390,7 @@ files = [ name = "boto3" version = "1.34.25" description = "The AWS SDK for Python" +category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -392,6 +410,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] name = "botocore" version = "1.34.25" description = "Low-level, data-driven core of boto 3." +category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -411,6 +430,7 @@ crt = ["awscrt (==0.19.19)"] name = "brotli" version = "1.1.0" description = "Python bindings for the Brotli compression library" +category = "dev" optional = false python-versions = "*" files = [ @@ -503,6 +523,7 @@ files = [ name = "cachelib" version = "0.10.2" description = "A collection of cache libraries in the same API interface." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -514,6 +535,7 @@ files = [ name = "cachetools" version = "4.2.4" description = "Extensible memoizing collections and decorators" +category = "main" optional = false python-versions = "~=3.5" files = [ @@ -525,6 +547,7 @@ files = [ name = "celery" version = "5.3.6" description = "Distributed Task Queue." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -586,6 +609,7 @@ zstd = ["zstandard (==0.22.0)"] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -597,6 +621,7 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = "*" files = [ @@ -673,6 +698,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -772,6 +798,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -786,6 +813,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-datetime" version = "0.2" description = "Datetime type support for click." +category = "main" optional = false python-versions = "*" files = [ @@ -803,6 +831,7 @@ dev = ["wheel"] name = "click-didyoumean" version = "0.3.0" description = "Enables git-like *did-you-mean* feature in click" +category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -817,6 +846,7 @@ click = ">=7" name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
+category = "main" optional = false python-versions = "*" files = [ @@ -834,6 +864,7 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "click-repl" version = "0.3.0" description = "REPL plugin for Click" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -852,6 +883,7 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] name = "colorama" version = "0.4.4" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -863,6 +895,7 @@ files = [ name = "configargparse" version = "1.7" description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -878,6 +911,7 @@ yaml = ["PyYAML"] name = "coverage" version = "5.5" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" files = [ @@ -945,6 +979,7 @@ toml = ["toml"] name = "coveralls" version = "3.3.1" description = "Show coverage stats online via coveralls.io" +category = "dev" optional = false python-versions = ">= 3.5" files = [ @@ -953,7 +988,7 @@ files = [ ] [package.dependencies] -coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0" +coverage = ">=4.1,<6.0.0 || >6.1,<6.1.1 || >6.1.1,<7.0" docopt = ">=0.6.1" requests = ">=1.0.0" @@ -962,53 +997,64 @@ yaml = ["PyYAML (>=3.10)"] [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = 
"cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:de5086cd475d67113ccb6f9fae6d8fe3ac54a4f9238fd08bfdb07b03d791ff0a"}, + {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:935cca25d35dda9e7bd46a24831dfd255307c55a07ff38fd1a92119cffc34857"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20100c22b298c9eaebe4f0b9032ea97186ac2555f426c3e70670f2517989543b"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eb6368d5327d6455f20327fb6159b97538820355ec00f8cc9464d617caecead"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:39d5c93e95bcbc4c06313fc6a500cee414ee39b616b55320c1904760ad686938"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d96ea47ce6d0055d5b97e761d37b4e84195485cb5a38401be341fabf23bc32a"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d1998e545081da0ab276bcb4b33cce85f775adb86a516e8f55b3dac87f469548"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93fbee08c48e63d5d1b39ab56fd3fdd02e6c2431c3da0f4edaf54954744c718f"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:90147dad8c22d64b2ff7331f8d4cddfdc3ee93e4879796f837bdbb2a0b141e0c"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4dcab7c25e48fc09a73c3e463d09ac902a932a0f8d0c568238b3696d06bf377b"}, + {file = "cryptography-42.0.3-cp37-abi3-win32.whl", hash = "sha256:1e935c2900fb53d31f491c0de04f41110351377be19d83d908c1fd502ae8daa5"}, + {file = 
"cryptography-42.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:762f3771ae40e111d78d77cbe9c1035e886ac04a234d3ee0856bf4ecb3749d54"}, + {file = "cryptography-42.0.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3ec384058b642f7fb7e7bff9664030011ed1af8f852540c76a1317a9dd0d20"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35772a6cffd1f59b85cb670f12faba05513446f80352fe811689b4e439b5d89e"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04859aa7f12c2b5f7e22d25198ddd537391f1695df7057c8700f71f26f47a129"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c3d1f5a1d403a8e640fa0887e9f7087331abb3f33b0f2207d2cc7f213e4a864c"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df34312149b495d9d03492ce97471234fd9037aa5ba217c2a6ea890e9166f151"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:de4ae486041878dc46e571a4c70ba337ed5233a1344c14a0790c4c4be4bbb8b4"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0fab2a5c479b360e5e0ea9f654bcebb535e3aa1e493a715b13244f4e07ea8eec"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25b09b73db78facdfd7dd0fa77a3f19e94896197c86e9f6dc16bce7b37a96504"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d5cf11bc7f0b71fb71af26af396c83dfd3f6eed56d4b6ef95d57867bf1e4ba65"}, + {file = "cryptography-42.0.3-cp39-abi3-win32.whl", hash = "sha256:0fea01527d4fb22ffe38cd98951c9044400f6eff4788cf52ae116e27d30a1ba3"}, + {file = "cryptography-42.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:2619487f37da18d6826e27854a7f9d4d013c51eafb066c80d09c63cf24505306"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ead69ba488f806fe1b1b4050febafdbf206b81fa476126f3e16110c818bac396"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:20180da1b508f4aefc101cebc14c57043a02b355d1a652b6e8e537967f1e1b46"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fbf0f3f0fac7c089308bd771d2c6c7b7d53ae909dce1db52d8e921f6c19bb3a"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c23f03cfd7d9826cdcbad7850de67e18b4654179e01fe9bc623d37c2638eb4ef"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db0480ffbfb1193ac4e1e88239f31314fe4c6cdcf9c0b8712b55414afbf80db4"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:6c25e1e9c2ce682d01fc5e2dde6598f7313027343bd14f4049b82ad0402e52cd"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9541c69c62d7446539f2c1c06d7046aef822940d248fa4b8962ff0302862cc1f"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b797099d221df7cce5ff2a1d272761d1554ddf9a987d3e11f6459b38cd300fd"}, + {file = "cryptography-42.0.3.tar.gz", hash = "sha256:069d2ce9be5526a44093a0991c450fe9906cdf069e0e7cd67d9dee49a62b9ebe"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", 
"sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" +category = "main" optional = false python-versions = "*" files = [ @@ -1019,6 +1065,7 @@ files = [ name = "docutils" version = "0.16" description = "Docutils -- Python Documentation Utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1030,6 +1077,7 @@ files = [ name = "environs" version = "9.5.0" description = "simplified environment variable parsing" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1051,6 +1099,7 @@ tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1065,6 +1114,7 @@ test = ["pytest (>=6)"] name = "execnet" version = "2.0.2" description = "execnet: rapid multi-Python deployment" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1079,6 +1129,7 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] name = "fido2" version = "0.9.3" description = "Python based FIDO 2.0 library" +category = "main" optional = false python-versions = ">=2.7.6,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" files = [ @@ -1096,6 +1147,7 @@ pcsc = ["pyscard"] name = "filelock" version = "3.13.1" description = "A platform independent file lock." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1112,6 +1164,7 @@ typing = ["typing-extensions (>=4.8)"] name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -1128,6 +1181,7 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.3.3" description = "A simple framework for building complex web applications." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1150,6 +1204,7 @@ dotenv = ["python-dotenv"] name = "flask-basicauth" version = "0.2.0" description = "HTTP basic access authentication for Flask." +category = "dev" optional = false python-versions = "*" files = [ @@ -1163,6 +1218,7 @@ Flask = "*" name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." 
+category = "main" optional = false python-versions = "*" files = [ @@ -1178,6 +1234,7 @@ Flask = "*" name = "flask-cors" version = "4.0.0" description = "A Flask extension adding a decorator for CORS support" +category = "dev" optional = false python-versions = "*" files = [ @@ -1192,6 +1249,7 @@ Flask = ">=0.9" name = "flask-marshmallow" version = "0.14.0" description = "Flask + marshmallow for beautiful APIs" +category = "main" optional = false python-versions = "*" files = [ @@ -1215,6 +1273,7 @@ tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-s name = "flask-migrate" version = "2.7.0" description = "SQLAlchemy database migrations for Flask applications using Alembic" +category = "main" optional = false python-versions = "*" files = [ @@ -1231,6 +1290,7 @@ Flask-SQLAlchemy = ">=1.0" name = "flask-redis" version = "0.4.0" description = "A nice way to use Redis in your Flask app" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1250,6 +1310,7 @@ tests = ["coverage", "pytest", "pytest-mock"] name = "Flask-SQLAlchemy" version = "2.3.2.dev20231128" description = "Adds SQLAlchemy support to your Flask application" +category = "main" optional = false python-versions = "*" files = [] @@ -1269,6 +1330,7 @@ resolved_reference = "500e732dd1b975a56ab06a46bd1a20a21e682262" name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1283,6 +1345,7 @@ python-dateutil = ">=2.7" name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1353,6 +1416,7 @@ files = [ name = "future" version = "0.18.3" description = "Clean single-source support for Python 3 and 2" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1363,6 +1427,7 @@ files = [ name = "gevent" version = "23.9.1" description = "Coroutine-based network library" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1425,6 +1490,7 @@ test = ["cffi (>=1.12.2)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idn name = "geventhttpclient" version = "2.0.11" description = "http client library for gevent" +category = "dev" optional = false python-versions = "*" files = [ @@ -1548,6 +1614,7 @@ six = "*" name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -1625,6 +1692,7 @@ test = ["objgraph", "psutil"] name = "gunicorn" version = "20.1.0" description = "WSGI HTTP Server for UNIX" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1645,6 +1713,7 @@ tornado = ["tornado (>=0.2)"] name = "idna" version = "2.10" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1656,6 +1725,7 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1667,6 +1737,7 @@ files = [ name = "iso8601" version = "2.0.0" description = "Simple module to parse ISO 8601 dates" +category = "main" optional = false python-versions = 
">=3.7,<4.0" files = [ @@ -1678,6 +1749,7 @@ files = [ name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" optional = false python-versions = "*" files = [ @@ -1692,6 +1764,7 @@ six = "*" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1709,6 +1782,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1720,6 +1794,7 @@ files = [ name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1737,6 +1812,7 @@ i18n = ["Babel (>=2.7)"] name = "jinja2-cli" version = "0.8.2" description = "A CLI interface to Jinja2" +category = "dev" optional = false python-versions = "*" files = [ @@ -1758,6 +1834,7 @@ yaml = ["jinja2", "pyyaml"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1769,6 +1846,7 @@ files = [ name = "jsonschema" version = "3.2.0" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -1790,6 +1868,7 @@ format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-va name = "kombu" version = "5.3.4" description = "Messaging library for Python." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1825,6 +1904,7 @@ zookeeper = ["kazoo (>=2.8.0)"] name = "locust" version = "2.16.1" description = "Developer friendly load testing framework" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1852,6 +1932,7 @@ Werkzeug = ">=2.0.0" name = "lxml" version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -1959,6 +2040,7 @@ source = ["Cython (>=0.29.35)"] name = "mako" version = "1.3.0" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1978,6 +2060,7 @@ testing = ["pytest"] name = "markupsafe" version = "2.1.4" description = "Safely add untrusted strings to HTML/XML markup." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2047,6 +2130,7 @@ files = [ name = "marshmallow" version = "3.20.2" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2067,6 +2151,7 @@ tests = ["pytest", "pytz", "simplejson"] name = "marshmallow-sqlalchemy" version = "0.29.0" description = "SQLAlchemy integration with the marshmallow (de)serialization library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2089,6 +2174,7 @@ tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2100,6 +2186,7 @@ files = [ name = "mistune" version = "0.8.4" description = "The fastest markdown parser in pure Python" +category = "main" optional = false python-versions = "*" files = [ @@ -2111,6 +2198,7 @@ files = [ name = "more-itertools" version = "8.14.0" description = "More routines for operating on iterables, beyond itertools" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2122,6 +2210,7 @@ files = [ name = "moto" version = "4.1.11" description = "" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2168,6 +2257,7 @@ xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] name = "msgpack" version = "1.0.7" description = "MessagePack serializer" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2233,6 +2323,7 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2316,6 +2407,7 @@ files = [ name = "mypy" version = "1.5.0" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2357,6 +2449,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2368,6 +2461,7 @@ files = [ name = "nanoid" version = "2.0.0" description = "A tiny, secure, URL-friendly, unique string ID generator for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -2379,6 +2473,7 @@ files = [ name = "networkx" version = "2.8.8" description = "Python package for creating and manipulating graphs and networks" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2397,6 +2492,7 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "newrelic" version = "6.10.0.165" description = "New Relic Python Agent" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" files = [ @@ -2422,6 +2518,7 @@ infinite-tracing = ["grpcio (<2)", "protobuf (<4)"] name = "notifications-python-client" version = "6.4.1" description = "Python API client for GOV.UK Notify." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2435,8 +2532,9 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "52.1.0" +version = "52.1.3" description = "Shared python code for Notification - Provides logging utils etc." 
+category = "main" optional = false python-versions = "~3.10" files = [] @@ -2448,7 +2546,7 @@ bleach = "6.0.0" boto3 = "1.34.25" cachetools = "4.2.4" certifi = "^2023.7.22" -cryptography = "^41.0.2" +cryptography = "^42.0.3" Flask = "2.3.3" Flask-Redis = "0.4.0" itsdangerous = "2.1.2" @@ -2470,13 +2568,14 @@ werkzeug = "2.3.7" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "2da74685e0ffb220f0403e1f2584e783be99bbad" -resolved_reference = "2da74685e0ffb220f0403e1f2584e783be99bbad" +reference = "upgrade-cryptography" +resolved_reference = "ac0422352576898b51325f914cf98d18e66f0bb3" [[package]] name = "ordered-set" version = "4.1.0" description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2491,6 +2590,7 @@ dev = ["black", "mypy", "pytest"] name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2502,6 +2602,7 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2513,6 +2614,7 @@ files = [ name = "pendulum" version = "2.1.2" description = "Python datetimes made easy" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2547,6 +2649,7 @@ pytzdata = ">=2020.1" name = "phonenumbers" version = "8.13.28" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." +category = "main" optional = false python-versions = "*" files = [ @@ -2558,6 +2661,7 @@ files = [ name = "platformdirs" version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2573,6 +2677,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2588,6 +2693,7 @@ testing = ["pytest", "pytest-benchmark"] name = "prompt-toolkit" version = "3.0.41" description = "Library for building powerful interactive command lines in Python" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2602,6 +2708,7 @@ wcwidth = "*" name = "psutil" version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2630,6 +2737,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2711,6 +2819,7 @@ files = [ name = "pwnedpasswords" version = "2.0.0" description = "A Python wrapper for Troy Hunt's Pwned Passwords API." 
+category = "main" optional = false python-versions = "*" files = [ @@ -2724,6 +2833,7 @@ future = "*" name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2735,6 +2845,7 @@ files = [ name = "py-w3c" version = "0.3.1" description = "W3C services for python." +category = "main" optional = false python-versions = "*" files = [ @@ -2745,6 +2856,7 @@ files = [ name = "pyasn1" version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2756,6 +2868,7 @@ files = [ name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2767,6 +2880,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2778,6 +2892,7 @@ files = [ name = "pycurl" version = "7.45.2" description = "PycURL -- A Python Interface To The cURL library" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2788,6 +2903,7 @@ files = [ name = "pydantic" version = "2.5.2" description = "Data validation using Python type hints" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2807,6 +2923,7 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.14.5" description = "" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2924,6 +3041,7 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2935,6 +3053,7 @@ files = [ name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2952,6 +3071,7 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pypdf2" version = "1.28.6" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" +category = "main" optional = false python-versions = ">=2.7" files = [ @@ -2963,6 +3083,7 @@ files = [ name = "pyrsistent" version = "0.20.0" description = "Persistent/Functional/Immutable data structures" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3004,6 +3125,7 @@ files = [ name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3026,6 +3148,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3044,6 +3167,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-env" version = "0.8.2" description = "py.test plugin that allows you to add environment variables." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3061,6 +3185,7 @@ test = ["coverage (>=7.2.7)", "pytest-mock (>=3.10)"] name = "pytest-forked" version = "1.6.0" description = "run tests in isolated forked subprocesses" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3076,6 +3201,7 @@ pytest = ">=3.10" name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3093,6 +3219,7 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "pytest-mock-resources" version = "2.9.2" description = "A pytest plugin for easily instantiating reproducible mock resources." +category = "dev" optional = false python-versions = ">=3.7,<4" files = [ @@ -3123,6 +3250,7 @@ redshift = ["boto3", "filelock", "moto", "python-on-whales (>=0.22.0)", "sqlpars name = "pytest-xdist" version = "2.5.0" description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3144,6 +3272,7 @@ testing = ["filelock"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3158,6 +3287,7 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3172,6 +3302,7 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3183,6 +3314,7 @@ files = [ name = "python-magic" version = "0.4.27" description = "File type identification using libmagic" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3194,6 +3326,7 @@ files = [ name = "python-on-whales" version = "0.67.0" description = "A Docker client for Python, designed to be fun and intuitive!" +category = "dev" optional = false python-versions = "<4,>=3.8" files = [ @@ -3202,7 +3335,7 @@ files = [ ] [package.dependencies] -pydantic = ">=1.9,<2.0.dev0 || >=2.1.dev0,<3" +pydantic = ">=1.9,<2.0.0 || >=2.1.0,<3" requests = "*" tqdm = "*" typer = ">=0.4.1" @@ -3215,6 +3348,7 @@ test = ["pytest"] name = "pytz" version = "2021.3" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -3226,6 +3360,7 @@ files = [ name = "pytzdata" version = "2020.1" description = "The Olson timezone database for Python." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3237,6 +3372,7 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "dev" optional = false python-versions = "*" files = [ @@ -3260,6 +3396,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3320,6 +3457,7 @@ files = [ name = "pyzmq" version = "25.1.1" description = "Python bindings for 0MQ" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3425,6 +3563,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "redis" version = "5.0.1" description = "Python client for Redis database and key-value store" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3443,6 +3582,7 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3464,6 +3604,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-file" version = "1.5.1" description = "File transport adapter for Requests" +category = "main" optional = false python-versions = "*" files = [ @@ -3479,6 +3620,7 @@ six = "*" name = "requests-mock" version = "1.11.0" description = "Mock out responses from the requests package" +category = "dev" optional = false python-versions = "*" files = [ @@ -3498,6 +3640,7 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes name = "requests-toolbelt" version = "1.0.0" description = "A utility belt for advanced users of python-requests" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3512,6 +3655,7 @@ requests = ">=2.0.1,<3.0.0" name = "responses" version = "0.24.1" description = "A utility library for mocking out the `requests` Python library." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3531,6 +3675,7 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "rfc3987" version = "1.3.8" description = "Parsing and validation of URIs (RFC 3986) and IRIs (RFC 3987)" +category = "dev" optional = false python-versions = "*" files = [ @@ -3542,6 +3687,7 @@ files = [ name = "roundrobin" version = "0.0.4" description = "Collection of roundrobin utilities" +category = "dev" optional = false python-versions = "*" files = [ @@ -3552,6 +3698,7 @@ files = [ name = "rsa" version = "4.7.2" description = "Pure-Python RSA implementation" +category = "main" optional = false python-versions = ">=3.5, <4" files = [ @@ -3566,6 +3713,7 @@ pyasn1 = ">=0.1.3" name = "s3transfer" version = "0.10.0" description = "An Amazon S3 Transfer Manager" +category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -3583,6 +3731,7 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] name = "setuptools" version = "69.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3599,6 +3748,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "simple-salesforce" version = "1.12.5" description = "A basic Salesforce.com REST API client." 
+category = "main" optional = false python-versions = "*" files = [ @@ -3618,6 +3768,7 @@ zeep = "*" name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3629,6 +3780,7 @@ files = [ name = "smartypants" version = "2.0.1" description = "Python with the SmartyPants" +category = "main" optional = false python-versions = "*" files = [ @@ -3639,6 +3791,7 @@ files = [ name = "sqlalchemy" version = "1.4.51" description = "Database Abstraction Library" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -3718,6 +3871,7 @@ sqlcipher = ["sqlcipher3_binary"] name = "sqlalchemy-stubs" version = "0.4" description = "SQLAlchemy stubs and mypy plugin" +category = "dev" optional = false python-versions = "*" files = [ @@ -3733,6 +3887,7 @@ typing-extensions = ">=3.7.4" name = "sqlalchemy2-stubs" version = "0.0.2a38" description = "Typing Stubs for SQLAlchemy 1.4" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3747,6 +3902,7 @@ typing-extensions = ">=3.7.4" name = "statsd" version = "3.3.0" description = "A simple statsd client." +category = "main" optional = false python-versions = "*" files = [ @@ -3758,6 +3914,7 @@ files = [ name = "strict-rfc3339" version = "0.7" description = "Strict, simple, lightweight RFC3339 functions" +category = "dev" optional = false python-versions = "*" files = [ @@ -3768,6 +3925,7 @@ files = [ name = "tldextract" version = "3.4.4" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3785,6 +3943,7 @@ requests-file = ">=1.4" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3796,6 +3955,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3807,6 +3967,7 @@ files = [ name = "tqdm" version = "4.66.1" description = "Fast, Extensible Progress Meter" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3827,6 +3988,7 @@ telegram = ["requests"] name = "typer" version = "0.9.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3848,6 +4010,7 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. 
name = "types-boto" version = "2.49.18.9" description = "Typing stubs for boto" +category = "dev" optional = false python-versions = "*" files = [ @@ -3859,6 +4022,7 @@ files = [ name = "types-mock" version = "4.0.15.2" description = "Typing stubs for mock" +category = "dev" optional = false python-versions = "*" files = [ @@ -3870,6 +4034,7 @@ files = [ name = "types-pyopenssl" version = "23.3.0.0" description = "Typing stubs for pyOpenSSL" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3884,6 +4049,7 @@ cryptography = ">=35.0.0" name = "types-python-dateutil" version = "2.8.19.20240106" description = "Typing stubs for python-dateutil" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3895,6 +4061,7 @@ files = [ name = "types-pytz" version = "2022.7.1.2" description = "Typing stubs for pytz" +category = "dev" optional = false python-versions = "*" files = [ @@ -3906,6 +4073,7 @@ files = [ name = "types-redis" version = "4.6.0.20240106" description = "Typing stubs for redis" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3921,6 +4089,7 @@ types-pyOpenSSL = "*" name = "types-requests" version = "2.31.0.20240106" description = "Typing stubs for requests" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3935,6 +4104,7 @@ urllib3 = ">=2" name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3946,6 +4116,7 @@ files = [ name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" +category = "main" optional = false python-versions = ">=2" files = [ @@ -3957,6 +4128,7 @@ files = [ name = "unidecode" version = "1.3.8" description = "ASCII transliterations of Unicode text" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3968,6 +4140,7 @@ files = [ name = "urllib3" version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3985,6 +4158,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "vine" version = "5.1.0" description = "Python promises." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3996,6 +4170,7 @@ files = [ name = "wcwidth" version = "0.2.12" description = "Measures the displayed width of unicode strings in a terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -4007,6 +4182,7 @@ files = [ name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" +category = "main" optional = false python-versions = "*" files = [ @@ -4018,6 +4194,7 @@ files = [ name = "werkzeug" version = "2.3.7" description = "The comprehensive WSGI web application library." 
+category = "main"
 optional = false
 python-versions = ">=3.8"
 files = [
@@ -4035,6 +4212,7 @@ watchdog = ["watchdog (>=2.3)"]
 name = "xmltodict"
 version = "0.13.0"
 description = "Makes working with XML feel like you are working with JSON"
+category = "dev"
 optional = false
 python-versions = ">=3.4"
 files = [
@@ -4046,6 +4224,7 @@ files = [
 name = "yarl"
 version = "1.9.3"
 description = "Yet another URL library"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -4149,6 +4328,7 @@ multidict = ">=4.0"
 name = "zeep"
 version = "4.2.1"
 description = "A Python SOAP client"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -4176,6 +4356,7 @@ xmlsec = ["xmlsec (>=0.6.1)"]
 name = "zope-event"
 version = "5.0"
 description = "Very basic event publishing system"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -4194,6 +4375,7 @@ test = ["zope.testrunner"]
 name = "zope-interface"
 version = "6.1"
 description = "Interfaces for Python"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -4246,4 +4428,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = "~3.10.9"
-content-hash = "96a15f8c5b9c35b9e581546744ec06b0f17dcf709d49db7f424a585243af67c8"
+content-hash = "e7aced65ef6042df1c2ea1d4f4910a22767a8a521c3f1cf0bf0b191c41b2208a"
diff --git a/pyproject.toml b/pyproject.toml
index 0df484ce40..e92314b4c8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -64,7 +64,7 @@ Werkzeug = "2.3.7"
 MarkupSafe = "2.1.4"
 # REVIEW: v2 is using sha512 instead of sha1 by default (in v1)
 itsdangerous = "2.1.2"
-notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", rev = "2da74685e0ffb220f0403e1f2584e783be99bbad" }
+notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", branch = "upgrade-cryptography" }
 # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8
 typing-extensions = "4.7.1"
 greenlet = "2.0.2"
@@ -72,7 +72,6 @@ simple-salesforce = "^1.12.3"
 # Pinned dependencies
 certifi = "^2023.7.22" # pinned for security reasons: https://github.com/cds-snc/notification-api/security/dependabot/119
-cryptography = "^41.0.2" # pinned for security reasons: https://github.com/cds-snc/notification-api/security/dependabot/118
 idna = "2.10" # pinned to align with test moto dependency requirements (for <=2.9)

 [tool.poetry.group.test.dependencies]
diff --git a/tests-perf/ops/Dockerfile b/tests-perf/ops/Dockerfile
index fad9c6f3ee..ba36582848 100644
--- a/tests-perf/ops/Dockerfile
+++ b/tests-perf/ops/Dockerfile
@@ -1,10 +1,10 @@
 FROM python:3.10-alpine3.16@sha256:afe68972cc00883d70b3760ee0ffbb7375cf09706c122dda7063ffe64c5be21b

 ENV PYTHONDONTWRITEBYTECODE 1
-ENV POETRY_VERSION "1.3.2"
+ENV POETRY_VERSION "1.7.1"
 ENV APP_VENV="/app/.venv"
 ENV POETRY_HOME="/opt/poetry"
-ENV POETRY_VERSION="1.3.2"
+ENV POETRY_VERSION="1.7.1"
 ENV POETRY_VIRTUALENVS_CREATE="false"
 ENV PATH="${APP_VENV}/bin:${POETRY_HOME}/bin:$PATH"

From f4c7a95f705581af548ae39e3ccc3793e56d5dc6 Mon Sep 17 00:00:00 2001
From: Andrew
Date: Thu, 22 Feb 2024 11:49:27 -0400
Subject: [PATCH 07/33] Fix/dashboard statistics (#2113)

* fix: update dates that are used to populate ft_notification_status table to
  use midnight UTC like everything else

* fix(get_notifications_for_service): use midnight UTC to stay consistent
  with the rest of the app

* chore: remove unused imports

* chore: formatting

* chore: remove unused import

* test: ensure `get_all_notifications_for_service()` counts
notifications for all hours of the day for the entire week * test: ensure `ft_notification_status` includes notifications from all hours of a given day (`00:00:00` to `23:59:59`) * chore: formatting * chore: remove unused import --------- Co-authored-by: William B <7444334+whabanks@users.noreply.github.com> --- app/dao/fact_notification_status_dao.py | 5 ++- app/dao/notifications_dao.py | 9 ++--- .../dao/test_fact_notification_status_dao.py | 33 +++++++++++++++++++ tests/app/service/test_rest.py | 29 ++++++++++++++++ 4 files changed, 67 insertions(+), 9 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 533ee274b3..8fdb52ec70 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -1,7 +1,6 @@ from datetime import datetime, time, timedelta from flask import current_app -from notifications_utils.timezones import convert_local_timezone_to_utc from sqlalchemy import Date, case, func from sqlalchemy.dialects.postgresql import insert from sqlalchemy.sql.expression import extract, literal @@ -39,8 +38,8 @@ def fetch_notification_status_for_day(process_day, service_id=None): - start_date = convert_local_timezone_to_utc(datetime.combine(process_day, time.min)) - end_date = convert_local_timezone_to_utc(datetime.combine(process_day + timedelta(days=1), time.min)) + start_date = datetime.combine(process_day, time.min) + end_date = datetime.combine(process_day + timedelta(days=1), time.min) # use notification_history if process day is older than 7 days # this is useful if we need to rebuild the ft_billing table for a date older than 7 days ago. current_app.logger.info("Fetch ft_notification_status for {} to {}".format(start_date, end_date)) diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index a39af634f1..9e20bca476 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -27,6 +27,7 @@ from app import create_uuid, db, signer_personalisation from app.aws.s3 import get_s3_bucket_objects, remove_s3_object from app.dao.dao_utils import transactional +from app.dao.date_util import utc_midnight_n_days_ago from app.errors import InvalidRequest from app.letters.utils import LETTERS_PDF_FILE_LOCATION_STRUCTURE from app.models import ( @@ -51,11 +52,7 @@ Service, ServiceDataRetention, ) -from app.utils import ( - escape_special_characters, - get_local_timezone_midnight_in_utc, - midnight_n_days_ago, -) +from app.utils import escape_special_characters, get_local_timezone_midnight_in_utc @transactional @@ -335,7 +332,7 @@ def get_notifications_for_service( filters = [Notification.service_id == service_id] if limit_days is not None: - filters.append(Notification.created_at >= midnight_n_days_ago(limit_days)) + filters.append(Notification.created_at >= utc_midnight_n_days_ago(limit_days)) if older_than is not None: older_than_created_at = db.session.query(Notification.created_at).filter(Notification.id == older_than).as_scalar() diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index e19d5adbae..ce10e24777 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -331,6 +331,39 @@ def test_fetch_notification_status_by_template_for_service_for_today_and_7_previ ] == sorted(results, key=lambda x: (x.notification_type, x.status, x.template_name, x.count)) +@freeze_time("2018-10-31T18:00:00") +def 
test_fetch_notification_status_gets_data_from_correct_timeframe(
+    notify_db_session,
+):
+    service_1 = create_service(service_name="service_1")
+    sms_template = create_template(service=service_1, template_type=SMS_TYPE)
+    email_template = create_template(service=service_1, template_type=EMAIL_TYPE)
+
+    # create notifications for every hour of the day
+    for i in range(24):
+        save_notification(create_notification(email_template, created_at=datetime(2018, 10, 30, i, 0, 0), status="delivered"))
+        save_notification(create_notification(email_template, created_at=datetime(2018, 10, 30, i, 0, 59), status="delivered"))
+        save_notification(create_notification(sms_template, created_at=datetime(2018, 10, 30, i, 0, 0), status="delivered"))
+        save_notification(create_notification(sms_template, created_at=datetime(2018, 10, 30, i, 0, 30), status="delivered"))
+        save_notification(create_notification(sms_template, created_at=datetime(2018, 10, 30, i, 0, 59), status="delivered"))
+
+    # too early, shouldn't be included
+    save_notification(
+        create_notification(
+            service_1.templates[0],
+            created_at=datetime(2018, 10, 29, 23, 59, 59),
+            status="delivered",
+        )
+    )
+    data = fetch_notification_status_for_day(process_day=datetime.utcnow() - timedelta(days=1))
+
+    assert data[0].notification_type == "email"
+    assert data[0].notification_count == 48
+
+    assert data[1].notification_type == "sms"
+    assert data[1].notification_count == 72
+
+
 def test_get_total_notifications_sent_for_api_key(notify_db_session):
     service = create_service(service_name="First Service")
     api_key = create_api_key(service)
diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py
index e3e309cda5..27ded50b48 100644
--- a/tests/app/service/test_rest.py
+++ b/tests/app/service/test_rest.py
@@ -1730,6 +1730,35 @@ def test_get_notifications_for_service_without_page_count(
     assert resp["notifications"][0]["id"] == str(without_job.id)


+@freeze_time("2018-11-20T18:00:00")
+@pytest.mark.parametrize("retention_period, expected_count_of_notifications", [(3, 72), (7, 168)])
+def test_get_notifications_for_service_gets_data_from_correct_timeframe(
+    admin_request, sample_service, retention_period, expected_count_of_notifications
+):
+    email_template = create_template(service=sample_service, template_type=EMAIL_TYPE)
+
+    # WEEK BEFORE
+    # Create one notification for every hour of the day, for each day of the retention period
+    for i in range(retention_period):
+        for j in range(24):
+            save_notification(
+                create_notification(email_template, created_at=datetime(2018, 11, 5 + i, j, 0, 0), status="delivered")
+            )
+
+    # THIS WEEK
+    # Create one notification for every hour of the day, for each day of the retention period
+    for i in range(retention_period):
+        for j in range(24):
+            save_notification(
+                create_notification(email_template, created_at=datetime(2018, 11, 13 + i, j, 0, 0), status="delivered")
+            )
+
+    resp = admin_request.get(
+        "service.get_all_notifications_for_service", service_id=email_template.service_id, limit_days=7, page_size=1
+    )
+    assert resp["total"] == expected_count_of_notifications
+
+
 @pytest.mark.parametrize(
     "should_prefix",
     [

From 9d1d445877c8a92adba6f18d134fe3aae259c7b8 Mon Sep 17 00:00:00 2001
From: William B <7444334+whabanks@users.noreply.github.com>
Date: Thu, 22 Feb 2024 13:29:30 -0400
Subject: [PATCH 08/33] Fix daily limit counting when scheduling jobs (#2112)

* Fix daily limit counting when scheduling jobs

- When scheduling a job, the daily limits in Redis will only be incremented
  if the job was scheduled for the current day
- When refreshing daily count keys
from the DB the underlying query will now include counts from jobs scheduled for the current day - Experimenting with a new decorator for feature flags - Fixed an issue with the dev container where the poetry installation will sometimes not be detected by adding the installation dir to the $PATH * Bump utils version and update lock file * fix tests? * Fix tests * Bump waffles commit sha in test action --- .../scripts/notify-dev-entrypoint.sh | 6 +- .github/workflows/test.yaml | 2 +- app/dao/services_dao.py | 43 +++++--- app/email_limit_utils.py | 11 ++- app/job/rest.py | 35 ++++--- app/notifications/rest.py | 2 +- app/v2/notifications/post_notifications.py | 5 +- tests/app/dao/test_services_dao.py | 97 ++++++++++++++++++- tests/app/job/test_rest.py | 1 + .../rest/test_send_notification.py | 2 +- 10 files changed, 164 insertions(+), 40 deletions(-) diff --git a/.devcontainer/scripts/notify-dev-entrypoint.sh b/.devcontainer/scripts/notify-dev-entrypoint.sh index 22cb552d2d..e2f99ea29a 100755 --- a/.devcontainer/scripts/notify-dev-entrypoint.sh +++ b/.devcontainer/scripts/notify-dev-entrypoint.sh @@ -34,8 +34,10 @@ cd /workspace echo -e "fpath+=/.zfunc" >> ~/.zshrc echo -e "autoload -Uz compinit && compinit" -pip install poetry==${POETRY_VERSION} \ - && poetry --version +pip install poetry==${POETRY_VERSION} +export PATH=$PATH:/home/vscode/.local/bin/ +which poetry +poetry --version # Initialize poetry autocompletions mkdir ~/.zfunc diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 3169d77d64..8d5c023e27 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -67,7 +67,7 @@ jobs: run: | cp -f .env.example .env - name: Checks for new endpoints against AWS WAF rules - uses: cds-snc/notification-utils/.github/actions/waffles@2da74685e0ffb220f0403e1f2584e783be99bbad # 52.1.0 + uses: cds-snc/notification-utils/.github/actions/waffles@06a40db6286f525fe3551e029418458d33342592 # 52.1.0 with: app-loc: '/github/workspace' app-libs: '/github/workspace/env/site-packages' diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 80d1f25c35..082a2566a1 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -23,6 +23,7 @@ CROWN_ORGANISATION_TYPES, EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, + JOB_STATUS_SCHEDULED, KEY_TYPE_TEST, NHS_ORGANISATION_TYPES, NON_CROWN_ORGANISATION_TYPES, @@ -424,20 +425,24 @@ def dao_fetch_todays_stats_for_service(service_id): def fetch_todays_total_message_count(service_id): midnight = get_midnight(datetime.now(tz=pytz.utc)) + scheduled = ( + db.session.query(func.coalesce(func.sum(Job.notification_count), 0).label("count")).filter( + Job.service_id == service_id, + Job.job_status == JOB_STATUS_SCHEDULED, + Job.scheduled_for >= midnight, + Job.scheduled_for < midnight + timedelta(days=1), + ) + ).first() + result = ( - db.session.query(func.count(Notification.id).label("count")) - .filter( + db.session.query(func.coalesce(func.count(Notification.id), 0).label("count")).filter( Notification.service_id == service_id, Notification.key_type != KEY_TYPE_TEST, - Notification.created_at > midnight, + Notification.created_at >= midnight, ) - .group_by( - Notification.notification_type, - Notification.status, - ) - .first() - ) - return 0 if result is None else result.count + ).first() + + return result.count + scheduled.count def fetch_todays_total_sms_count(service_id): @@ -461,17 +466,25 @@ def fetch_service_email_limit(service_id: uuid.UUID) -> int: def fetch_todays_total_email_count(service_id: uuid.UUID) -> int: 
midnight = get_midnight(datetime.now(tz=pytz.utc))
+    scheduled = (
+        db.session.query(func.coalesce(func.sum(Job.notification_count), 0).label("total_scheduled_notifications")).filter(
+            Job.service_id == service_id,
+            Job.job_status == JOB_STATUS_SCHEDULED,
+            Job.scheduled_for >= midnight,
+            Job.scheduled_for < midnight + timedelta(days=1),
+        )
+    ).first()
+
     result = (
-        db.session.query(func.count(Notification.id).label("total_email_notifications"))
-        .filter(
+        db.session.query(func.coalesce(func.count(Notification.id), 0).label("total_email_notifications")).filter(
             Notification.service_id == service_id,
             Notification.key_type != KEY_TYPE_TEST,
             Notification.created_at > midnight,
             Notification.notification_type == "email",
         )
-        .first()
-    )
-    return 0 if result is None else result.total_email_notifications
+    ).first()
+
+    return result.total_email_notifications + scheduled.total_scheduled_notifications


 def _stats_for_service_query(service_id):
diff --git a/app/email_limit_utils.py b/app/email_limit_utils.py
index 7086d5c6f0..0fb8bf3364 100644
--- a/app/email_limit_utils.py
+++ b/app/email_limit_utils.py
@@ -3,6 +3,7 @@
 from flask import current_app

 from notifications_utils.clients.redis import email_daily_count_cache_key
+from notifications_utils.decorators import requires_feature

 from app import redis_store
 from app.dao.services_dao import fetch_todays_total_email_count
@@ -20,9 +21,15 @@ def fetch_todays_email_count(service_id: UUID) -> int:
     return int(total_email_count)


+@requires_feature("REDIS_ENABLED")
 def increment_todays_email_count(service_id: UUID, increment_by: int) -> None:
-    if not current_app.config["REDIS_ENABLED"]:
-        return
     fetch_todays_email_count(service_id)  # to make sure it's set in redis
     cache_key = email_daily_count_cache_key(service_id)
     redis_store.incrby(cache_key, increment_by)
+
+
+@requires_feature("REDIS_ENABLED")
+def decrement_todays_email_count(service_id: UUID, decrement_by: int) -> None:
+    fetch_todays_email_count(service_id)
+    cache_key = email_daily_count_cache_key(service_id)
+    redis_store.decrby(cache_key, decrement_by)
diff --git a/app/job/rest.py b/app/job/rest.py
index 3c443ec4ba..49990f2961 100644
--- a/app/job/rest.py
+++ b/app/job/rest.py
@@ -1,3 +1,5 @@
+from datetime import datetime
+
 import dateutil
 from flask import Blueprint, current_app, jsonify, request
 from notifications_utils.recipients import RecipientCSV
@@ -20,6 +22,7 @@ from app.dao.notifications_dao import get_notifications_for_job
 from app.dao.services_dao import dao_fetch_service_by_id
 from app.dao.templates_dao import dao_get_template_by_id
+from app.email_limit_utils import decrement_todays_email_count
 from app.errors import InvalidRequest, register_errors
 from app.models import (
     EMAIL_TYPE,
@@ -67,7 +70,7 @@ def cancel_job(service_id, job_id):
     job = dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id)
     job.job_status = JOB_STATUS_CANCELLED
     dao_update_job(job)
-
+    decrement_todays_email_count(service_id, job.notification_count)
     return get_job_by_service_and_job_id(service_id, job_id)

@@ -137,15 +140,23 @@ def create_job(service_id):
         raise InvalidRequest("Create job is not allowed: service is inactive ", 403)

     data = request.get_json()
-    data.update({"service": service_id})
+
     try:
         data.update(**get_job_metadata_from_s3(service_id, data["id"]))
     except KeyError:
         raise InvalidRequest({"id": ["Missing data for required field."]}, status_code=400)

+    if data.get("valid") != "True":
+        raise InvalidRequest("File is not valid, can't create job",
400) + data["template"] = data.pop("template_id") + template = dao_get_template_by_id(data["template"]) + template_errors = unarchived_template_schema.validate({"archived": template.archived}) + + if template_errors: + raise InvalidRequest(template_errors, status_code=400) job = get_job_from_s3(service_id, data["id"]) recipient_csv = RecipientCSV( @@ -170,22 +181,16 @@ def create_job(service_id): if not is_test_notification: check_sms_daily_limit(service, len(recipient_csv)) + increment_sms_daily_count_send_warnings_if_needed(service, len(recipient_csv)) - if template.template_type == EMAIL_TYPE: + elif template.template_type == EMAIL_TYPE: check_email_daily_limit(service, len(list(recipient_csv.get_rows()))) + scheduled_for = datetime.fromisoformat(form.get("scheduled_for")) if form.get("scheduled_for") else None # noqa: F821 - if data.get("valid") != "True": - raise InvalidRequest("File is not valid, can't create job", 400) - - errors = unarchived_template_schema.validate({"archived": template.archived}) - - if errors: - raise InvalidRequest(errors, status_code=400) - - if template.template_type == SMS_TYPE and not is_test_notification: - increment_sms_daily_count_send_warnings_if_needed(service, len(recipient_csv)) - elif template.template_type == EMAIL_TYPE: - increment_email_daily_count_send_warnings_if_needed(service, len(list(recipient_csv.get_rows()))) + if scheduled_for is None or not scheduled_for.date() > datetime.today().date(): + increment_email_daily_count_send_warnings_if_needed( + authenticated_service, len(list(recipient_csv.get_rows())) # noqa: F821 + ) data.update({"template_version": template.version}) diff --git a/app/notifications/rest.py b/app/notifications/rest.py index 70b09228f8..e3ec327df6 100644 --- a/app/notifications/rest.py +++ b/app/notifications/rest.py @@ -112,7 +112,7 @@ def send_notification(notification_type: NotificationType): ) simulated = simulated_recipient(notification_form["to"], notification_type) - if not simulated != api_user.key_type == KEY_TYPE_TEST: + if not simulated != api_user.key_type == KEY_TYPE_TEST and notification_type == EMAIL_TYPE: check_email_daily_limit(authenticated_service, 1) check_template_is_for_notification_type(notification_type, template.template_type) diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index d9d333c0e0..9a1640b9a2 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -223,7 +223,10 @@ def post_bulk(): if template.template_type == EMAIL_TYPE and api_user.key_type != KEY_TYPE_TEST: check_email_daily_limit(authenticated_service, len(list(recipient_csv.get_rows()))) - increment_email_daily_count_send_warnings_if_needed(authenticated_service, len(list(recipient_csv.get_rows()))) + scheduled_for = datetime.fromisoformat(form.get("scheduled_for")) if form.get("scheduled_for") else None + + if scheduled_for is None or not scheduled_for.date() > datetime.today().date(): + increment_email_daily_count_send_warnings_if_needed(authenticated_service, len(list(recipient_csv.get_rows()))) if template.template_type == SMS_TYPE: # calculate the number of simulated recipients diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index 4a81ea4f86..46941c9887 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -46,6 +46,7 @@ from app.models import ( EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, + JOB_STATUS_SCHEDULED, KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, 
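Taken together, the create_job and post_bulk changes above apply one rule: a send only counts against today's daily limit if it is unscheduled or scheduled for the current day. A minimal standalone sketch of that gate, assuming scheduled_for arrives as an ISO 8601 string when present (the helper name is illustrative, not part of the codebase):

    from datetime import datetime


    def counts_against_todays_limit(scheduled_for: str | None) -> bool:
        # Unscheduled sends, and jobs scheduled for the current day,
        # consume today's limit; jobs scheduled for a later date do not.
        if not scheduled_for:
            return True
        return not datetime.fromisoformat(scheduled_for).date() > datetime.today().date()

This mirrors the `scheduled_for is None or not scheduled_for.date() > datetime.today().date()` condition used at both call sites.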
@@ -68,6 +69,7 @@ user_folder_permissions, ) from app.schemas import service_schema +from tests.app.conftest import create_sample_job from tests.app.db import ( create_annual_billing, create_api_key, @@ -1396,7 +1398,7 @@ def create_email_sms_letter_template(): class TestServiceEmailLimits: - def test_get_email_count_for_service(self, notify_db_session): + def test_get_email_count_for_service(self): active_user_1 = create_user(email="active1@foo.com", state="active") service = Service( name="service_name", @@ -1432,7 +1434,98 @@ def test_dao_fetch_todays_total_message_count_returns_0_with_yesterday_messages( notification = save_notification( create_notification( created_at=yesterday, - template=create_template(service=create_service(service_name="tester"), template_type="email"), + template=create_template(service=create_service(service_name="tester123"), template_type="email"), ) ) assert fetch_todays_total_message_count(notification.service.id) == 0 + + def test_dao_fetch_todays_total_message_count_counts_notifications_in_jobs_scheduled_for_today( + self, notify_db, notify_db_session + ): + service = create_service(service_name="tester12") + template = create_template(service=service, template_type="email") + today = datetime.utcnow().date() + + create_sample_job( + notify_db, + notify_db_session, + service=service, + template=template, + scheduled_for=today, + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + save_notification( + create_notification( + created_at=today, + template=template, + ) + ) + assert fetch_todays_total_message_count(service.id) == 11 + + def test_dao_fetch_todays_total_message_count_counts_notifications_in_jobs_scheduled_for_today_but_not_after_today( + self, notify_db, notify_db_session + ): + service = create_service() + template = create_template(service=service, template_type="email") + today = datetime.utcnow().date() + + create_sample_job( + notify_db, + notify_db_session, + service=service, + template=template, + scheduled_for=today, + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + save_notification( + create_notification( + created_at=today, + template=template, + ) + ) + create_sample_job( + notify_db, + notify_db_session, + service=service, + template=template, + scheduled_for=today + timedelta(days=1), + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + + assert fetch_todays_total_message_count(service.id) == 11 + + def test_dao_fetch_todays_total_message_count_counts_notifications_in_jobs_scheduled_for_today_but_not_before_today( + self, notify_db, notify_db_session + ): + service = create_service() + template = create_template(service=service, template_type="email") + today = datetime.utcnow().date() + + create_sample_job( + notify_db, + notify_db_session, + service=service, + template=template, + scheduled_for=today, + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + create_sample_job( + notify_db, + notify_db_session, + service=service, + template=template, + scheduled_for=today - timedelta(days=1), + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + save_notification( + create_notification( + created_at=today, + template=template, + ) + ) + assert fetch_todays_total_message_count(service.id) == 11 diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index e184da8199..932e9c7834 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -451,6 +451,7 @@ def test_create_job_returns_404_if_template_does_not_exist(client, sample_servic 
"app.job.rest.get_job_metadata_from_s3", return_value={ "template_id": str(sample_service.id), + "valid": "True", }, ) data = { diff --git a/tests/app/notifications/rest/test_send_notification.py b/tests/app/notifications/rest/test_send_notification.py index 71f8c63ec6..a1cf1f0d52 100644 --- a/tests/app/notifications/rest/test_send_notification.py +++ b/tests/app/notifications/rest/test_send_notification.py @@ -466,7 +466,7 @@ def test_should_allow_api_call_if_under_day_limit_regardless_of_type( sms_template = create_sample_template(notify_db, notify_db_session, service=service) create_sample_notification(notify_db, notify_db_session, template=email_template, service=service) - data = {"to": sample_user.mobile_number, "template": str(sms_template.id)} + data = {"to": sample_user.mobile_number, "template": str(sms_template.id), "valid": "True"} auth_header = create_authorization_header(service_id=service.id) From c4489b802baad6f00b2b35b00ca3a8a29e893f47 Mon Sep 17 00:00:00 2001 From: William B <7444334+whabanks@users.noreply.github.com> Date: Fri, 23 Feb 2024 13:38:51 -0400 Subject: [PATCH 09/33] Fix missing import (#2118) * Fix missing import - Added tests to cover a blind spot that caused this issue to slip by in the first place * Use service we already fetched not authenticated_service * Remove noqa comments --- app/job/rest.py | 6 ++-- tests/app/job/test_rest.py | 65 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+), 4 deletions(-) diff --git a/app/job/rest.py b/app/job/rest.py index 49990f2961..950f1554c9 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -185,12 +185,10 @@ def create_job(service_id): elif template.template_type == EMAIL_TYPE: check_email_daily_limit(service, len(list(recipient_csv.get_rows()))) - scheduled_for = datetime.fromisoformat(form.get("scheduled_for")) if form.get("scheduled_for") else None # noqa: F821 + scheduled_for = datetime.fromisoformat(data.get("scheduled_for")) if data.get("scheduled_for") else None if scheduled_for is None or not scheduled_for.date() > datetime.today().date(): - increment_email_daily_count_send_warnings_if_needed( - authenticated_service, len(list(recipient_csv.get_rows())) # noqa: F821 - ) + increment_email_daily_count_send_warnings_if_needed(service, len(list(recipient_csv.get_rows()))) data.update({"template_version": template.version}) diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index 932e9c7834..76f89ebdf9 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -125,6 +125,71 @@ def test_cancel_letter_job_does_not_call_cancel_if_can_letter_job_be_cancelled_r assert response["message"] == "Sorry, it's too late, letters have already been sent." 
+def test_create_unscheduled_email_job_increments_daily_count(client, mocker, sample_email_job, fake_uuid): + mocker.patch("app.celery.tasks.process_job.apply_async") + mocker.patch("app.job.rest.increment_email_daily_count_send_warnings_if_needed") + mocker.patch( + "app.job.rest.get_job_metadata_from_s3", + return_value={ + "template_id": sample_email_job.template_id, + "original_file_name": sample_email_job.original_file_name, + "notification_count": "1", + "valid": "True", + }, + ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="email address\r\nsome@email.com", + ) + mocker.patch("app.dao.services_dao.dao_fetch_service_by_id", return_value=sample_email_job.service) + data = { + "id": fake_uuid, + "created_by": str(sample_email_job.created_by.id), + } + path = "/service/{}/job".format(sample_email_job.service_id) + auth_header = create_authorization_header() + headers = [("Content-Type", "application/json"), auth_header] + + response = client.post(path, data=json.dumps(data), headers=headers) + + assert response.status_code == 201 + + app.celery.tasks.process_job.apply_async.assert_called_once_with(([str(fake_uuid)]), queue="job-tasks") + app.job.rest.increment_email_daily_count_send_warnings_if_needed.assert_called_once_with(sample_email_job.service, 1) + + +def test_create_future_not_same_day_scheduled_email_job_does_not_increment_daily_count( + client, mocker, sample_email_job, fake_uuid +): + scheduled_date = (datetime.utcnow() + timedelta(hours=36, minutes=59)).isoformat() + mocker.patch("app.celery.tasks.process_job.apply_async") + mocker.patch("app.job.rest.increment_email_daily_count_send_warnings_if_needed") + mocker.patch( + "app.job.rest.get_job_metadata_from_s3", + return_value={ + "template_id": sample_email_job.template_id, + "original_file_name": sample_email_job.original_file_name, + "notification_count": "1", + "valid": "True", + }, + ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="email address\r\nsome@email.com", + ) + mocker.patch("app.dao.services_dao.dao_fetch_service_by_id", return_value=sample_email_job.service) + data = {"id": fake_uuid, "created_by": str(sample_email_job.created_by.id), "scheduled_for": scheduled_date} + path = "/service/{}/job".format(sample_email_job.service_id) + auth_header = create_authorization_header() + headers = [("Content-Type", "application/json"), auth_header] + + response = client.post(path, data=json.dumps(data), headers=headers) + + assert response.status_code == 201 + + app.job.rest.increment_email_daily_count_send_warnings_if_needed.assert_not_called() + + def test_create_unscheduled_job(client, sample_template, mocker, fake_uuid): mocker.patch("app.celery.tasks.process_job.apply_async") mocker.patch( From 67203214b9f2cb876225180865ea2dd6d202d378 Mon Sep 17 00:00:00 2001 From: William B <7444334+whabanks@users.noreply.github.com> Date: Mon, 26 Feb 2024 11:54:26 -0400 Subject: [PATCH 10/33] Update utils ref from branch to tag (#2119) --- poetry.lock | 185 ++----------------------------------------------- pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 180 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5482ec4aa3..cd2b5ca1ba 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "aiohttp" version = "3.9.1" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -101,7 +100,6 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -116,7 +114,6 @@ frozenlist = ">=1.1.0" name = "alembic" version = "1.12.1" description = "A database migration tool for SQLAlchemy." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -136,7 +133,6 @@ tz = ["python-dateutil"] name = "amqp" version = "5.2.0" description = "Low-level AMQP client for Python (fork of amqplib)." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -151,7 +147,6 @@ vine = ">=5.0.0,<6.0.0" name = "annotated-types" version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -163,7 +158,6 @@ files = [ name = "apig-wsgi" version = "2.18.0" description = "Wrap a WSGI application in an AWS Lambda handler function for running on API Gateway or an ALB." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -175,7 +169,6 @@ files = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -187,7 +180,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -206,7 +198,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "aws-embedded-metrics" version = "1.0.8" description = "AWS Embedded Metrics Package" -category = "main" optional = false python-versions = "*" files = [ @@ -221,7 +212,6 @@ aiohttp = "*" name = "awscli" version = "1.32.25" description = "Universal Command Line Environment for AWS." -category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -241,7 +231,6 @@ s3transfer = ">=0.10.0,<0.11.0" name = "awscli-cwlogs" version = "1.4.6" description = "AWSCLI CloudWatch Logs plugin" -category = "main" optional = false python-versions = "*" files = [ @@ -258,7 +247,6 @@ six = ">=1.1.0" name = "bcrypt" version = "4.1.1" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -289,7 +277,6 @@ typecheck = ["mypy"] name = "billiard" version = "4.2.0" description = "Python multiprocessing fork with improvements and bugfixes" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -301,7 +288,6 @@ files = [ name = "black" version = "23.7.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -347,7 +333,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -366,7 +351,6 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "blinker" version = "1.7.0" description = "Fast, simple object-to-object and broadcast signaling" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -378,7 +362,6 @@ files = [ name = "boto" version = "2.49.0" description = "Amazon Web Services Library" -category = "main" optional = false python-versions = "*" files = [ @@ -390,7 +373,6 @@ files = [ name = "boto3" version = "1.34.25" description = "The AWS SDK for Python" -category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -410,7 +392,6 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] name = "botocore" version = "1.34.25" description = "Low-level, data-driven core of boto 3." -category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -430,7 +411,6 @@ crt = ["awscrt (==0.19.19)"] name = "brotli" version = "1.1.0" description = "Python bindings for the Brotli compression library" -category = "dev" optional = false python-versions = "*" files = [ @@ -523,7 +503,6 @@ files = [ name = "cachelib" version = "0.10.2" description = "A collection of cache libraries in the same API interface." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -535,7 +514,6 @@ files = [ name = "cachetools" version = "4.2.4" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = "~=3.5" files = [ @@ -547,7 +525,6 @@ files = [ name = "celery" version = "5.3.6" description = "Distributed Task Queue." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -609,7 +586,6 @@ zstd = ["zstandard (==0.22.0)"] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -621,7 +597,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = "*" files = [ @@ -698,7 +673,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -798,7 +772,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -813,7 +786,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-datetime" version = "0.2" description = "Datetime type support for click." -category = "main" optional = false python-versions = "*" files = [ @@ -831,7 +803,6 @@ dev = ["wheel"] name = "click-didyoumean" version = "0.3.0" description = "Enables git-like *did-you-mean* feature in click" -category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -846,7 +817,6 @@ click = ">=7" name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
-category = "main" optional = false python-versions = "*" files = [ @@ -864,7 +834,6 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "click-repl" version = "0.3.0" description = "REPL plugin for Click" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -883,7 +852,6 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] name = "colorama" version = "0.4.4" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -895,7 +863,6 @@ files = [ name = "configargparse" version = "1.7" description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -911,7 +878,6 @@ yaml = ["PyYAML"] name = "coverage" version = "5.5" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" files = [ @@ -979,7 +945,6 @@ toml = ["toml"] name = "coveralls" version = "3.3.1" description = "Show coverage stats online via coveralls.io" -category = "dev" optional = false python-versions = ">= 3.5" files = [ @@ -988,7 +953,7 @@ files = [ ] [package.dependencies] -coverage = ">=4.1,<6.0.0 || >6.1,<6.1.1 || >6.1.1,<7.0" +coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0" docopt = ">=0.6.1" requests = ">=1.0.0" @@ -999,7 +964,6 @@ yaml = ["PyYAML (>=3.10)"] name = "cryptography" version = "42.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1054,7 +1018,6 @@ test-randomorder = ["pytest-randomly"] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" -category = "main" optional = false python-versions = "*" files = [ @@ -1065,7 +1028,6 @@ files = [ name = "docutils" version = "0.16" description = "Docutils -- Python Documentation Utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1077,7 +1039,6 @@ files = [ name = "environs" version = "9.5.0" description = "simplified environment variable parsing" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1099,7 +1060,6 @@ tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1114,7 +1074,6 @@ test = ["pytest (>=6)"] name = "execnet" version = "2.0.2" description = "execnet: rapid multi-Python deployment" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1129,7 +1088,6 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] name = "fido2" version = "0.9.3" description = "Python based FIDO 2.0 library" -category = "main" optional = false python-versions = ">=2.7.6,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" files = [ @@ -1147,7 +1105,6 @@ pcsc = ["pyscard"] name = "filelock" version = "3.13.1" description = "A platform independent file lock." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1164,7 +1121,6 @@ typing = ["typing-extensions (>=4.8)"] name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -1181,7 +1137,6 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.3.3" description = "A simple framework for building complex web applications." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1204,7 +1159,6 @@ dotenv = ["python-dotenv"] name = "flask-basicauth" version = "0.2.0" description = "HTTP basic access authentication for Flask." -category = "dev" optional = false python-versions = "*" files = [ @@ -1218,7 +1172,6 @@ Flask = "*" name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." -category = "main" optional = false python-versions = "*" files = [ @@ -1234,7 +1187,6 @@ Flask = "*" name = "flask-cors" version = "4.0.0" description = "A Flask extension adding a decorator for CORS support" -category = "dev" optional = false python-versions = "*" files = [ @@ -1249,7 +1201,6 @@ Flask = ">=0.9" name = "flask-marshmallow" version = "0.14.0" description = "Flask + marshmallow for beautiful APIs" -category = "main" optional = false python-versions = "*" files = [ @@ -1273,7 +1224,6 @@ tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-s name = "flask-migrate" version = "2.7.0" description = "SQLAlchemy database migrations for Flask applications using Alembic" -category = "main" optional = false python-versions = "*" files = [ @@ -1290,7 +1240,6 @@ Flask-SQLAlchemy = ">=1.0" name = "flask-redis" version = "0.4.0" description = "A nice way to use Redis in your Flask app" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1310,7 +1259,6 @@ tests = ["coverage", "pytest", "pytest-mock"] name = "Flask-SQLAlchemy" version = "2.3.2.dev20231128" description = "Adds SQLAlchemy support to your Flask application" -category = "main" optional = false python-versions = "*" files = [] @@ -1330,7 +1278,6 @@ resolved_reference = "500e732dd1b975a56ab06a46bd1a20a21e682262" name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1345,7 +1292,6 @@ python-dateutil = ">=2.7" name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1416,7 +1362,6 @@ files = [ name = "future" version = "0.18.3" description = "Clean single-source support for Python 3 and 2" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1427,7 +1372,6 @@ files = [ name = "gevent" version = "23.9.1" description = "Coroutine-based network library" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1490,7 +1434,6 @@ test = ["cffi (>=1.12.2)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idn name = "geventhttpclient" version = "2.0.11" description = "http client library for gevent" -category = "dev" optional = false python-versions = "*" files = [ @@ -1614,7 +1557,6 @@ six = "*" name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = 
">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -1692,7 +1634,6 @@ test = ["objgraph", "psutil"] name = "gunicorn" version = "20.1.0" description = "WSGI HTTP Server for UNIX" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1713,7 +1654,6 @@ tornado = ["tornado (>=0.2)"] name = "idna" version = "2.10" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1725,7 +1665,6 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1737,7 +1676,6 @@ files = [ name = "iso8601" version = "2.0.0" description = "Simple module to parse ISO 8601 dates" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1749,7 +1687,6 @@ files = [ name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" -category = "main" optional = false python-versions = "*" files = [ @@ -1764,7 +1701,6 @@ six = "*" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1782,7 +1718,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1794,7 +1729,6 @@ files = [ name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1812,7 +1746,6 @@ i18n = ["Babel (>=2.7)"] name = "jinja2-cli" version = "0.8.2" description = "A CLI interface to Jinja2" -category = "dev" optional = false python-versions = "*" files = [ @@ -1834,7 +1767,6 @@ yaml = ["jinja2", "pyyaml"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1846,7 +1778,6 @@ files = [ name = "jsonschema" version = "3.2.0" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1868,7 +1799,6 @@ format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-va name = "kombu" version = "5.3.4" description = "Messaging library for Python." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1904,7 +1834,6 @@ zookeeper = ["kazoo (>=2.8.0)"] name = "locust" version = "2.16.1" description = "Developer friendly load testing framework" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1932,7 +1861,6 @@ Werkzeug = ">=2.0.0" name = "lxml" version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -2040,7 +1968,6 @@ source = ["Cython (>=0.29.35)"] name = "mako" version = "1.3.0" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2060,7 +1987,6 @@ testing = ["pytest"] name = "markupsafe" version = "2.1.4" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2130,7 +2056,6 @@ files = [ name = "marshmallow" version = "3.20.2" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2151,7 +2076,6 @@ tests = ["pytest", "pytz", "simplejson"] name = "marshmallow-sqlalchemy" version = "0.29.0" description = "SQLAlchemy integration with the marshmallow (de)serialization library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2174,7 +2098,6 @@ tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2186,7 +2109,6 @@ files = [ name = "mistune" version = "0.8.4" description = "The fastest markdown parser in pure Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2198,7 +2120,6 @@ files = [ name = "more-itertools" version = "8.14.0" description = "More routines for operating on iterables, beyond itertools" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2210,7 +2131,6 @@ files = [ name = "moto" version = "4.1.11" description = "" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2257,7 +2177,6 @@ xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] name = "msgpack" version = "1.0.7" description = "MessagePack serializer" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2323,7 +2242,6 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2407,7 +2325,6 @@ files = [ name = "mypy" version = "1.5.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2449,7 +2366,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2461,7 +2377,6 @@ files = [ name = "nanoid" version = "2.0.0" description = "A tiny, secure, URL-friendly, unique string ID generator for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2473,7 +2388,6 @@ files = [ name = "networkx" version = "2.8.8" description = "Python package for creating and manipulating graphs and networks" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2492,7 +2406,6 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "newrelic" version = "6.10.0.165" description = "New Relic Python Agent" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" files = [ @@ -2518,7 +2431,6 @@ infinite-tracing = ["grpcio (<2)", "protobuf (<4)"] name = "notifications-python-client" version = "6.4.1" description = "Python API client for GOV.UK Notify." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2534,7 +2446,6 @@ requests = ">=2.0.0" name = "notifications-utils" version = "52.1.3" description = "Shared python code for Notification - Provides logging utils etc." -category = "main" optional = false python-versions = "~3.10" files = [] @@ -2568,14 +2479,13 @@ werkzeug = "2.3.7" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "upgrade-cryptography" -resolved_reference = "ac0422352576898b51325f914cf98d18e66f0bb3" +reference = "52.1.3" +resolved_reference = "06a40db6286f525fe3551e029418458d33342592" [[package]] name = "ordered-set" version = "4.1.0" description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2590,7 +2500,6 @@ dev = ["black", "mypy", "pytest"] name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2602,7 +2511,6 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2614,7 +2522,6 @@ files = [ name = "pendulum" version = "2.1.2" description = "Python datetimes made easy" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2649,7 +2556,6 @@ pytzdata = ">=2020.1" name = "phonenumbers" version = "8.13.28" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." -category = "main" optional = false python-versions = "*" files = [ @@ -2661,7 +2567,6 @@ files = [ name = "platformdirs" version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2677,7 +2582,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2693,7 +2597,6 @@ testing = ["pytest", "pytest-benchmark"] name = "prompt-toolkit" version = "3.0.41" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2708,7 +2611,6 @@ wcwidth = "*" name = "psutil" version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2737,7 +2639,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2819,7 +2720,6 @@ files = [ name = "pwnedpasswords" version = "2.0.0" description = "A Python wrapper for Troy Hunt's Pwned Passwords API." 
-category = "main" optional = false python-versions = "*" files = [ @@ -2833,7 +2733,6 @@ future = "*" name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2845,7 +2744,6 @@ files = [ name = "py-w3c" version = "0.3.1" description = "W3C services for python." -category = "main" optional = false python-versions = "*" files = [ @@ -2856,7 +2754,6 @@ files = [ name = "pyasn1" version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2868,7 +2765,6 @@ files = [ name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2880,7 +2776,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2892,7 +2787,6 @@ files = [ name = "pycurl" version = "7.45.2" description = "PycURL -- A Python Interface To The cURL library" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2903,7 +2797,6 @@ files = [ name = "pydantic" version = "2.5.2" description = "Data validation using Python type hints" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2923,7 +2816,6 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.14.5" description = "" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3041,7 +2933,6 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3053,7 +2944,6 @@ files = [ name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3071,7 +2961,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pypdf2" version = "1.28.6" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" -category = "main" optional = false python-versions = ">=2.7" files = [ @@ -3083,7 +2972,6 @@ files = [ name = "pyrsistent" version = "0.20.0" description = "Persistent/Functional/Immutable data structures" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3125,7 +3013,6 @@ files = [ name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3148,7 +3035,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3167,7 +3053,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-env" version = "0.8.2" description = "py.test plugin that allows you to add environment variables." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3185,7 +3070,6 @@ test = ["coverage (>=7.2.7)", "pytest-mock (>=3.10)"] name = "pytest-forked" version = "1.6.0" description = "run tests in isolated forked subprocesses" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3201,7 +3085,6 @@ pytest = ">=3.10" name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3219,7 +3102,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "pytest-mock-resources" version = "2.9.2" description = "A pytest plugin for easily instantiating reproducible mock resources." -category = "dev" optional = false python-versions = ">=3.7,<4" files = [ @@ -3250,7 +3132,6 @@ redshift = ["boto3", "filelock", "moto", "python-on-whales (>=0.22.0)", "sqlpars name = "pytest-xdist" version = "2.5.0" description = "pytest xdist plugin for distributed testing and loop-on-failing modes" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3272,7 +3153,6 @@ testing = ["filelock"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3287,7 +3167,6 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3302,7 +3181,6 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3314,7 +3192,6 @@ files = [ name = "python-magic" version = "0.4.27" description = "File type identification using libmagic" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3326,7 +3203,6 @@ files = [ name = "python-on-whales" version = "0.67.0" description = "A Docker client for Python, designed to be fun and intuitive!" -category = "dev" optional = false python-versions = "<4,>=3.8" files = [ @@ -3335,7 +3211,7 @@ files = [ ] [package.dependencies] -pydantic = ">=1.9,<2.0.0 || >=2.1.0,<3" +pydantic = ">=1.9,<2.0.dev0 || >=2.1.dev0,<3" requests = "*" tqdm = "*" typer = ">=0.4.1" @@ -3348,7 +3224,6 @@ test = ["pytest"] name = "pytz" version = "2021.3" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -3360,7 +3235,6 @@ files = [ name = "pytzdata" version = "2020.1" description = "The Olson timezone database for Python." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3372,7 +3246,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "dev" optional = false python-versions = "*" files = [ @@ -3396,7 +3269,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3457,7 +3329,6 @@ files = [ name = "pyzmq" version = "25.1.1" description = "Python bindings for 0MQ" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3563,7 +3434,6 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "redis" version = "5.0.1" description = "Python client for Redis database and key-value store" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3582,7 +3452,6 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3604,7 +3473,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-file" version = "1.5.1" description = "File transport adapter for Requests" -category = "main" optional = false python-versions = "*" files = [ @@ -3620,7 +3488,6 @@ six = "*" name = "requests-mock" version = "1.11.0" description = "Mock out responses from the requests package" -category = "dev" optional = false python-versions = "*" files = [ @@ -3640,7 +3507,6 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes name = "requests-toolbelt" version = "1.0.0" description = "A utility belt for advanced users of python-requests" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3655,7 +3521,6 @@ requests = ">=2.0.1,<3.0.0" name = "responses" version = "0.24.1" description = "A utility library for mocking out the `requests` Python library." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3675,7 +3540,6 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "rfc3987" version = "1.3.8" description = "Parsing and validation of URIs (RFC 3986) and IRIs (RFC 3987)" -category = "dev" optional = false python-versions = "*" files = [ @@ -3687,7 +3551,6 @@ files = [ name = "roundrobin" version = "0.0.4" description = "Collection of roundrobin utilities" -category = "dev" optional = false python-versions = "*" files = [ @@ -3698,7 +3561,6 @@ files = [ name = "rsa" version = "4.7.2" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.5, <4" files = [ @@ -3713,7 +3575,6 @@ pyasn1 = ">=0.1.3" name = "s3transfer" version = "0.10.0" description = "An Amazon S3 Transfer Manager" -category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -3731,7 +3592,6 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] name = "setuptools" version = "69.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3748,7 +3608,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "simple-salesforce" version = "1.12.5" description = "A basic Salesforce.com REST API client." 
-category = "main" optional = false python-versions = "*" files = [ @@ -3768,7 +3627,6 @@ zeep = "*" name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3780,7 +3638,6 @@ files = [ name = "smartypants" version = "2.0.1" description = "Python with the SmartyPants" -category = "main" optional = false python-versions = "*" files = [ @@ -3791,7 +3648,6 @@ files = [ name = "sqlalchemy" version = "1.4.51" description = "Database Abstraction Library" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -3871,7 +3727,6 @@ sqlcipher = ["sqlcipher3_binary"] name = "sqlalchemy-stubs" version = "0.4" description = "SQLAlchemy stubs and mypy plugin" -category = "dev" optional = false python-versions = "*" files = [ @@ -3887,7 +3742,6 @@ typing-extensions = ">=3.7.4" name = "sqlalchemy2-stubs" version = "0.0.2a38" description = "Typing Stubs for SQLAlchemy 1.4" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3902,7 +3756,6 @@ typing-extensions = ">=3.7.4" name = "statsd" version = "3.3.0" description = "A simple statsd client." -category = "main" optional = false python-versions = "*" files = [ @@ -3914,7 +3767,6 @@ files = [ name = "strict-rfc3339" version = "0.7" description = "Strict, simple, lightweight RFC3339 functions" -category = "dev" optional = false python-versions = "*" files = [ @@ -3925,7 +3777,6 @@ files = [ name = "tldextract" version = "3.4.4" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3943,7 +3794,6 @@ requests-file = ">=1.4" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3955,7 +3805,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3967,7 +3816,6 @@ files = [ name = "tqdm" version = "4.66.1" description = "Fast, Extensible Progress Meter" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3988,7 +3836,6 @@ telegram = ["requests"] name = "typer" version = "0.9.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4010,7 +3857,6 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. 
name = "types-boto" version = "2.49.18.9" description = "Typing stubs for boto" -category = "dev" optional = false python-versions = "*" files = [ @@ -4022,7 +3868,6 @@ files = [ name = "types-mock" version = "4.0.15.2" description = "Typing stubs for mock" -category = "dev" optional = false python-versions = "*" files = [ @@ -4034,7 +3879,6 @@ files = [ name = "types-pyopenssl" version = "23.3.0.0" description = "Typing stubs for pyOpenSSL" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4049,7 +3893,6 @@ cryptography = ">=35.0.0" name = "types-python-dateutil" version = "2.8.19.20240106" description = "Typing stubs for python-dateutil" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4061,7 +3904,6 @@ files = [ name = "types-pytz" version = "2022.7.1.2" description = "Typing stubs for pytz" -category = "dev" optional = false python-versions = "*" files = [ @@ -4073,7 +3915,6 @@ files = [ name = "types-redis" version = "4.6.0.20240106" description = "Typing stubs for redis" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4089,7 +3930,6 @@ types-pyOpenSSL = "*" name = "types-requests" version = "2.31.0.20240106" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4104,7 +3944,6 @@ urllib3 = ">=2" name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4116,7 +3955,6 @@ files = [ name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ @@ -4128,7 +3966,6 @@ files = [ name = "unidecode" version = "1.3.8" description = "ASCII transliterations of Unicode text" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -4140,7 +3977,6 @@ files = [ name = "urllib3" version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4158,7 +3994,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "vine" version = "5.1.0" description = "Python promises." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4170,7 +4005,6 @@ files = [ name = "wcwidth" version = "0.2.12" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -4182,7 +4016,6 @@ files = [ name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "main" optional = false python-versions = "*" files = [ @@ -4194,7 +4027,6 @@ files = [ name = "werkzeug" version = "2.3.7" description = "The comprehensive WSGI web application library." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4212,7 +4044,6 @@ watchdog = ["watchdog (>=2.3)"] name = "xmltodict" version = "0.13.0" description = "Makes working with XML feel like you are working with JSON" -category = "dev" optional = false python-versions = ">=3.4" files = [ @@ -4224,7 +4055,6 @@ files = [ name = "yarl" version = "1.9.3" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4328,7 +4158,6 @@ multidict = ">=4.0" name = "zeep" version = "4.2.1" description = "A Python SOAP client" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4356,7 +4185,6 @@ xmlsec = ["xmlsec (>=0.6.1)"] name = "zope-event" version = "5.0" description = "Very basic event publishing system" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4375,7 +4203,6 @@ test = ["zope.testrunner"] name = "zope-interface" version = "6.1" description = "Interfaces for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4428,4 +4255,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "e7aced65ef6042df1c2ea1d4f4910a22767a8a521c3f1cf0bf0b191c41b2208a" +content-hash = "f2bf5c58fe6d2689072e7b9d4cf91976e07e76ade98dc3153977c4377b98c86e" diff --git a/pyproject.toml b/pyproject.toml index e92314b4c8..609ff16d41 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ Werkzeug = "2.3.7" MarkupSafe = "2.1.4" # REVIEW: v2 is using sha512 instead of sha1 by default (in v1) itsdangerous = "2.1.2" -notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", branch = "upgrade-cryptography" } +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.1.3" } # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 typing-extensions = "4.7.1" greenlet = "2.0.2" From 260722e4ffdcf9b2f865583161dd6642e9f36fad Mon Sep 17 00:00:00 2001 From: Jumana B Date: Wed, 28 Feb 2024 10:10:38 -0500 Subject: [PATCH 11/33] Task: Filter Heartbeats (#2108) (#2120) * Add filter for heartbeat template * fix formatting * Edit and add a test * test for rest endpoint * Add filter heartbeats for live service data --- app/dao/fact_notification_status_dao.py | 10 +++- app/dao/services_dao.py | 7 ++- app/service/rest.py | 6 +- .../dao/test_fact_notification_status_dao.py | 57 +++++++++++++++++++ tests/app/dao/test_services_dao.py | 12 +++- tests/app/service/test_rest.py | 15 +++++ 6 files changed, 97 insertions(+), 10 deletions(-) diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 8fdb52ec70..0da5e1b143 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -148,8 +148,8 @@ def fetch_notification_status_for_service_by_month(start_date, end_date, service ) -def fetch_delivered_notification_stats_by_month(): - return ( +def fetch_delivered_notification_stats_by_month(filter_heartbeats=None): + query = ( db.session.query( func.date_trunc("month", FactNotificationStatus.bst_date).cast(db.Text).label("month"), FactNotificationStatus.notification_type, @@ -168,8 +168,12 @@ def fetch_delivered_notification_stats_by_month(): func.date_trunc("month", FactNotificationStatus.bst_date).desc(), FactNotificationStatus.notification_type, ) - .all() ) + if filter_heartbeats: + query = query.filter( + FactNotificationStatus.service_id != 
current_app.config["NOTIFY_SERVICE_ID"], + ) + return query.all() def fetch_notification_stats_for_trial_services(): diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 082a2566a1..1e36ace594 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -84,7 +84,7 @@ def dao_count_live_services(): ).count() -def dao_fetch_live_services_data(): +def dao_fetch_live_services_data(filter_heartbeats=None): year_start_date, year_end_date = get_current_financial_year() most_recent_annual_billing = ( @@ -176,8 +176,11 @@ def dao_fetch_live_services_data(): AnnualBilling.free_sms_fragment_limit, ) .order_by(asc(Service.go_live_at)) - .all() ) + + if filter_heartbeats: + data = data.filter(Service.id != current_app.config["NOTIFY_SERVICE_ID"]) + data = data.all() results = [] for row in data: existing_service = next((x for x in results if x["service_id"] == row.service_id), None) diff --git a/app/service/rest.py b/app/service/rest.py index 8c198b0151..8ecf13f47d 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -209,13 +209,15 @@ def find_services_by_name(): @service_blueprint.route("/live-services-data", methods=["GET"]) def get_live_services_data(): - data = dao_fetch_live_services_data() + filter_heartbeats = request.args.get("filter_heartbeats", None) == "True" + data = dao_fetch_live_services_data(filter_heartbeats=filter_heartbeats) return jsonify(data=data) @service_blueprint.route("/delivered-notifications-stats-by-month-data", methods=["GET"]) def get_delivered_notification_stats_by_month_data(): - return jsonify(data=fetch_delivered_notification_stats_by_month()) + filter_heartbeats = request.args.get("filter_heartbeats", None) == "True" + return jsonify(data=fetch_delivered_notification_stats_by_month(filter_heartbeats=filter_heartbeats)) @service_blueprint.route("/", methods=["GET"]) diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index ce10e24777..34026b596e 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -51,6 +51,7 @@ create_template, save_notification, ) +from tests.conftest import set_config def test_update_fact_notification_status(notify_db_session): @@ -761,6 +762,62 @@ def test_fetch_delivered_notification_stats_by_month(sample_service): assert results[3].count == 6 +@freeze_time("2020-11-02 14:00") +def test_fetch_delivered_notification_stats_by_month_filter_heartbeats(notify_api, sample_service): + sms_template = create_template(service=sample_service, template_type="sms", template_name="a") + email_template = create_template(service=sample_service, template_type="email", template_name="b") + + # Not counted: before GC Notify started + create_ft_notification_status( + utc_date=date(2019, 10, 10), + service=sample_service, + template=email_template, + count=3, + ) + + create_ft_notification_status( + utc_date=date(2019, 12, 10), + service=sample_service, + template=email_template, + count=3, + ) + + create_ft_notification_status( + utc_date=date(2019, 12, 5), + service=sample_service, + template=sms_template, + notification_status=NOTIFICATION_DELIVERED, + count=6, + ) + + create_ft_notification_status( + utc_date=date(2020, 1, 1), + service=sample_service, + template=sms_template, + notification_status=NOTIFICATION_SENT, + count=4, + ) + + # Not counted: failed notifications + create_ft_notification_status( + utc_date=date(2020, 1, 1), + service=sample_service, + template=sms_template, + 
notification_status=NOTIFICATION_FAILED, + count=10, + ) + + create_ft_notification_status( + utc_date=date(2020, 3, 1), + service=sample_service, + template=email_template, + count=5, + ) + with set_config(notify_api, "NOTIFY_SERVICE_ID", email_template.service_id): + results = fetch_delivered_notification_stats_by_month(filter_heartbeats=True) + assert len(results) == 0 + + def test_fetch_delivered_notification_stats_by_month_empty(): assert fetch_delivered_notification_stats_by_month() == [] diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index 46941c9887..ef57322c9c 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -89,6 +89,7 @@ create_user, save_notification, ) +from tests.conftest import set_config # from unittest import mock @@ -495,7 +496,8 @@ def test_get_all_user_services_should_return_empty_list_if_no_services_for_user( @freeze_time("2019-04-23T10:00:00") -def test_dao_fetch_live_services_data(sample_user): +@pytest.mark.parametrize("filter_heartbeats", [True, False]) +def test_dao_fetch_live_services_data_filter_heartbeats(notify_api, sample_user, filter_heartbeats): org = create_organisation(organisation_type="nhs_central") service = create_service(go_live_user=sample_user, go_live_at="2014-04-20T10:00:00") template = create_template(service=service) @@ -563,8 +565,12 @@ def test_dao_fetch_live_services_data(sample_user): # 3rd service: billing from 2019 create_annual_billing(service_3.id, 200, 2019) - results = dao_fetch_live_services_data() - assert len(results) == 3 + with set_config(notify_api, "NOTIFY_SERVICE_ID", template.service_id): + results = dao_fetch_live_services_data(filter_heartbeats=filter_heartbeats) + if not filter_heartbeats: + assert len(results) == 3 + else: + assert len(results) == 2 # checks the results and that they are ordered by date: # @todo: this test is temporarily forced to pass until we can add the fiscal year back into # the query and create a new endpoint for the homepage stats diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 27ded50b48..d7eae2d564 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -70,6 +70,7 @@ create_user, save_notification, ) +from tests.conftest import set_config def test_get_service_list(client, service_factory): @@ -253,6 +254,20 @@ def test_get_delivered_notification_stats_by_month_data(admin_request, sample_se assert first["count"] == 3 +def test_get_delivered_notification_stats_by_month_data_without_heartbeat(notify_api, admin_request, sample_service): + email_template = create_template(service=sample_service, template_type="email", template_name="b") + + create_ft_notification_status( + utc_date=date(2019, 12, 10), + service=sample_service, + template=email_template, + count=3, + ) + with set_config(notify_api, "NOTIFY_SERVICE_ID", email_template.service_id): + response = admin_request.get("service.get_delivered_notification_stats_by_month_data", filter_heartbeats=True)["data"] + assert len(response) == 0 + + def test_get_service_by_id(admin_request, sample_service): json_resp = admin_request.get("service.get_service_by_id", service_id=sample_service.id) assert json_resp["data"]["name"] == sample_service.name From 807f7906ab7773ffd7154b5e9573d03f3f36978d Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Wed, 28 Feb 2024 16:43:35 -0500 Subject: [PATCH 12/33] add more logging to the remove_sms_email_jobs task (#2125) --- app/celery/nightly_tasks.py | 3 +++ 1 file changed, 3 
insertions(+)

diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py
index 0a2ac8d652..c69e32018a 100644
--- a/app/celery/nightly_tasks.py
+++ b/app/celery/nightly_tasks.py
@@ -50,8 +50,11 @@ def remove_letter_csv_files():

 def _remove_csv_files(job_types):
     jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types)
+    current_app.logger.info("TEMP LOGGING: trying to remove {} jobs.".format(len(jobs)))
     for job in jobs:
+        current_app.logger.info("TEMP LOGGING: trying to remove Job ID {} from s3.".format(job.id))
         s3.remove_job_from_s3(job.service_id, job.id)
+        current_app.logger.info("TEMP LOGGING: trying to archive Job ID {}".format(job.id))
         dao_archive_job(job)
         current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))

From bfadea3bb7ba83cf57d3d206e907b01bfa9239ef Mon Sep 17 00:00:00 2001
From: Jimmy Royer
Date: Thu, 29 Feb 2024 12:11:25 -0500
Subject: [PATCH 13/33] Updated the PR template to latest agreed format (#2122)

* Updated the PR template to latest agreed format.
* Reworked format and removed question marks in reviewer's checklist.
* Added French translation for the related issues section.
* Reworded the reviewer's checklist.
* Removed superfluous description
* Added further instructions on reviewer's checklist.
---
 pull_request_template.md | 61 ++++++++--------
 1 file changed, 11 insertions(+), 50 deletions(-)

diff --git a/pull_request_template.md b/pull_request_template.md
index f1ffb136c6..483d2b2639 100644
--- a/pull_request_template.md
+++ b/pull_request_template.md
@@ -1,65 +1,26 @@
 # Summary | Résumé

-> 1-3 sentence description of the changed you're proposing, including a link to
-> a GitHub Issue # or Trello card if applicable.
+_TODO: 1-3 sentence description of the change you're proposing._

----
+## Related Issues | Cartes liées

-> Description en 1 à 3 phrases de la modification proposée, avec un lien vers le
-> problème (« issue ») GitHub ou la fiche Trello, le cas échéant.
+* https://app.zenhub.com/workspaces/notify-planning-614b3ad91bc2030015ed22f5/issues/gh/cds-snc/notification-planning/1
+* https://app.zenhub.com/workspaces/notify-planning-core-6411dfb7c95fb80014e0cab0/issues/gh/cds-snc/notification-planning-core/1

 # Test instructions | Instructions pour tester la modification

-> Sequential steps (1., 2., 3., ...) that describe how to test this change. This
-> will help a developer test things out without too much detective work. Also,
-> include any environmental setup steps that aren't in the normal README steps
-> and/or any time-based elements that this requires.
-
----
-
-> Étapes consécutives (1., 2., 3., …) qui décrivent la façon de tester la
-> modification. Elles aideront les développeurs à faire des tests sans avoir à
-> jouer au détective. Veuillez aussi inclure toutes les étapes de configuration
-> de l’environnement qui ne font pas partie des étapes normales dans le fichier
-> README et tout élément temporel requis.
+_TODO: Fill in test instructions for the reviewer._

 # Release Instructions | Instructions pour le déploiement

 None.

-> Necessary steps to perform before and after the deployment of these changes.
-> For example, emptying the cache on a feature that changes the cache data
-> structure in Redis could be mentioned.
-
----
-
-> Étapes nécessaires à exécuter avant et après le déploiement des changements
-> introduits par cette proposition. Par exemple, vider la cache suite à des
-> changements modifiant une structure de données de la cache pourrait être
-> mentionné.
-
 # Reviewer checklist | Liste de vérification du réviseur

-This is a suggested checklist of questions reviewers might ask during their
-review | Voici une suggestion de liste de vérification comprenant des questions
-que les réviseurs pourraient poser pendant leur examen :
+- [ ] This PR does not break existing functionality.
+- [ ] This PR does not violate GCNotify's privacy policies.
+- [ ] This PR does not raise new security concerns. Refer to our GC Notify Risk Register document on our Google drive.
+- [ ] This PR does not significantly alter performance.
+- [ ] Additional required documentation resulting from these changes is covered (such as the README, setup instructions, a related ADR or the technical documentation).

-- [ ] Is the code maintainable? | Est-ce que le code peut être maintenu?
-- [ ] Have you tested it? | L’avez-vous testé?
-- [ ] Are there automated tests? | Y a-t-il des tests automatisés?
-- [ ] Does this cause automated test coverage to drop? | Est-ce que ça entraîne
-  une baisse de la quantité de code couvert par les tests automatisés?
-- [ ] Does this break existing functionality? | Est-ce que ça brise une
-  fonctionnalité existante?
-- [ ] Does this change the privacy policy? | Est-ce que ça entraîne une
-  modification de la politique de confidentialité?
-- [ ] Does this introduce any security concerns? | Est-ce que ça introduit des
-  préoccupations liées à la sécurité?
-- [ ] Does this significantly alter performance? | Est-ce que ça modifie de
-  façon importante la performance?
-- [ ] What is the risk level of using added dependencies? | Quel est le degré de
-  risque d’utiliser des dépendances ajoutées?
-- [ ] Should any documentation be updated as a result of this? (i.e. README
-  setup, etc.) | Faudra-t-il mettre à jour la documentation à la suite de ce
-  changement (fichier README, etc.)?
+> ⚠ If boxes cannot be checked off before merging the PR, they should be moved to the "Release Instructions" section with appropriate steps required to verify before release. For example, changes to celery code may require tests on staging to verify that performance has not been affected.
\ No newline at end of file From 6f4903dd9f03aabca626df05ce79c4771b8b7f5f Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Tue, 5 Mar 2024 16:43:07 -0500 Subject: [PATCH 14/33] add diagnostic logging to create_job (#2130) --- app/job/rest.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/app/job/rest.py b/app/job/rest.py index 950f1554c9..a5e6c0e2ae 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -136,6 +136,7 @@ def get_jobs_by_service(service_id): @job_blueprint.route("", methods=["POST"]) def create_job(service_id): service = dao_fetch_service_by_id(service_id) + current_app.logger.info(" TEMP LOGGING 1: done dao_fetch_service_by_id") if not service.active: raise InvalidRequest("Create job is not allowed: service is inactive ", 403) @@ -146,6 +147,7 @@ def create_job(service_id): data.update(**get_job_metadata_from_s3(service_id, data["id"])) except KeyError: raise InvalidRequest({"id": ["Missing data for required field."]}, status_code=400) + current_app.logger.info(" TEMP LOGGING 2: done data.update") if data.get("valid") != "True": raise InvalidRequest("File is not valid, can't create job", 400) @@ -153,18 +155,21 @@ def create_job(service_id): data["template"] = data.pop("template_id") template = dao_get_template_by_id(data["template"]) + current_app.logger.info(" TEMP LOGGING 3: done dao_get_template_by_id") template_errors = unarchived_template_schema.validate({"archived": template.archived}) if template_errors: raise InvalidRequest(template_errors, status_code=400) job = get_job_from_s3(service_id, data["id"]) + current_app.logger.info(" TEMP LOGGING 4: done get_job_from_s3") recipient_csv = RecipientCSV( job, template_type=template.template_type, placeholders=template._as_utils_template().placeholders, template=Template(template.__dict__), ) + current_app.logger.info(" TEMP LOGGING 5: done RecipientCSV()") if template.template_type == SMS_TYPE: # calculate the number of simulated recipients @@ -189,6 +194,7 @@ def create_job(service_id): if scheduled_for is None or not scheduled_for.date() > datetime.today().date(): increment_email_daily_count_send_warnings_if_needed(service, len(list(recipient_csv.get_rows()))) + current_app.logger.info(" TEMP LOGGING 6: done checking limits") data.update({"template_version": template.version}) @@ -198,9 +204,11 @@ def create_job(service_id): job.job_status = JOB_STATUS_SCHEDULED dao_create_job(job) + current_app.logger.info(" TEMP LOGGING 7: done dao_create_job") if job.job_status == JOB_STATUS_PENDING: process_job.apply_async([str(job.id)], queue=QueueNames.JOBS) + current_app.logger.info(" TEMP LOGGING 8: done process_job.apply_async") job_json = job_schema.dump(job) job_json["statistics"] = [] From b33730afa68bb62fbfbf28067d6f603516a5fec5 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Wed, 6 Mar 2024 14:57:03 -0500 Subject: [PATCH 15/33] Add logging and tweak recipient_csv use (#2132) --- app/job/rest.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/app/job/rest.py b/app/job/rest.py index a5e6c0e2ae..00b55e4cde 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -173,27 +173,27 @@ def create_job(service_id): if template.template_type == SMS_TYPE: # calculate the number of simulated recipients - numberOfSimulated = sum( - simulated_recipient(i["phone_number"].data, template.template_type) for i in list(recipient_csv.get_rows()) - ) - mixedRecipients = numberOfSimulated > 0 and numberOfSimulated != len(list(recipient_csv.get_rows())) + numberOfSimulated = 
sum(simulated_recipient(i["phone_number"].data, template.template_type) for i in recipient_csv.rows) + mixedRecipients = numberOfSimulated > 0 and numberOfSimulated != len(recipient_csv) # if they have specified testing and NON-testing recipients, raise an error if mixedRecipients: raise InvalidRequest(message="Bulk sending to testing and non-testing numbers is not supported", status_code=400) - is_test_notification = len(list(recipient_csv.get_rows())) == numberOfSimulated + is_test_notification = len(recipient_csv) == numberOfSimulated if not is_test_notification: check_sms_daily_limit(service, len(recipient_csv)) increment_sms_daily_count_send_warnings_if_needed(service, len(recipient_csv)) elif template.template_type == EMAIL_TYPE: - check_email_daily_limit(service, len(list(recipient_csv.get_rows()))) + check_email_daily_limit(service, len(recipient_csv)) + current_app.logger.info(" TEMP LOGGING 6a: done check_email_daily_limit") + scheduled_for = datetime.fromisoformat(data.get("scheduled_for")) if data.get("scheduled_for") else None if scheduled_for is None or not scheduled_for.date() > datetime.today().date(): - increment_email_daily_count_send_warnings_if_needed(service, len(list(recipient_csv.get_rows()))) + increment_email_daily_count_send_warnings_if_needed(service, len(recipient_csv)) current_app.logger.info(" TEMP LOGGING 6: done checking limits") data.update({"template_version": template.version}) From 25645042248c4996fcf079a28a64a7823a671aa0 Mon Sep 17 00:00:00 2001 From: Jumana B Date: Thu, 7 Mar 2024 10:02:44 -0500 Subject: [PATCH 16/33] Add org_id to Email Branding Table (#2128) * Add organisation_id to email_branding * fix format * fix * test updates * fixes --- app/dao/organisation_dao.py | 13 +++++- app/models.py | 8 +++- .../versions/0445_add_org_id_branding.py | 46 +++++++++++++++++++ tests/app/email_branding/test_rest.py | 7 ++- 4 files changed, 69 insertions(+), 5 deletions(-) create mode 100644 migrations/versions/0445_add_org_id_branding.py diff --git a/app/dao/organisation_dao.py b/app/dao/organisation_dao.py index 8c2ef63ddd..06ed25958d 100644 --- a/app/dao/organisation_dao.py +++ b/app/dao/organisation_dao.py @@ -2,7 +2,14 @@ from app import db from app.dao.dao_utils import transactional, version_class -from app.models import Domain, InvitedOrganisationUser, Organisation, Service, User +from app.models import ( + Domain, + EmailBranding, + InvitedOrganisationUser, + Organisation, + Service, + User, +) def dao_get_organisations(): @@ -55,6 +62,10 @@ def dao_update_organisation(organisation_id, **kwargs): domains = kwargs.pop("domains", None) num_updated = Organisation.query.filter_by(id=organisation_id).update(kwargs) + if "email_branding_id" in kwargs: + email_brand = EmailBranding.query.filter_by(id=kwargs["email_branding_id"]).one() + org = Organisation.query.get(organisation_id) + org.email_branding = email_brand if isinstance(domains, list): Domain.query.filter_by(organisation_id=organisation_id).delete() diff --git a/app/models.py b/app/models.py index 704ccf798a..81767406a1 100644 --- a/app/models.py +++ b/app/models.py @@ -276,6 +276,10 @@ class EmailBranding(BaseModel): nullable=False, default=BRANDING_ORG_NEW, ) + organisation_id = db.Column( + UUID(as_uuid=True), db.ForeignKey("organisation.id", ondelete="SET NULL"), index=True, nullable=True + ) + organisation = db.relationship("Organisation", back_populates="email_branding", foreign_keys=[organisation_id]) def serialize(self) -> dict: serialized = { @@ -285,6 +289,7 @@ def serialize(self) -> dict: 
"name": self.name, "text": self.text, "brand_type": self.brand_type, + "organisation_id": str(self.organisation_id) if self.organisation_id else "", } return serialized @@ -449,10 +454,9 @@ class Organisation(BaseModel): "Domain", ) - email_branding = db.relationship("EmailBranding") + email_branding = db.relationship("EmailBranding", uselist=False) email_branding_id = db.Column( UUID(as_uuid=True), - db.ForeignKey("email_branding.id"), nullable=True, ) diff --git a/migrations/versions/0445_add_org_id_branding.py b/migrations/versions/0445_add_org_id_branding.py new file mode 100644 index 0000000000..0504d5f492 --- /dev/null +++ b/migrations/versions/0445_add_org_id_branding.py @@ -0,0 +1,46 @@ +""" +Revision ID: 0445_add_org_id_branding +Revises: 0444_add_index_n_history2.py +Create Date: 2024-02-27 +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "0445_add_org_id_branding" +down_revision = "0444_add_index_n_history2" + + +def upgrade(): + op.add_column( + "email_branding", + sa.Column("organisation_id", postgresql.UUID(as_uuid=True), nullable=True), + ) + op.create_index( + op.f("ix_email_branding_organisation_id"), + "email_branding", + ["organisation_id"], + unique=False, + ) + op.create_foreign_key( + "fk_email_branding_organisation", + "email_branding", + "organisation", + ["organisation_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("fk_organisation_email_branding_id", "organisation", type_="foreignkey") + + +def downgrade(): + op.drop_index(op.f("ix_email_branding_organisation_id"), table_name="email_branding") + op.drop_constraint("fk_email_branding_organisation", "email_branding", type_="foreignkey") + op.drop_column("email_branding", "organisation_id") + op.create_foreign_key( + "fk_organisation_email_branding_id", + "organisation", + "email_branding", + ["email_branding_id"], + ["id"], + ) diff --git a/tests/app/email_branding/test_rest.py b/tests/app/email_branding/test_rest.py index c09218d62d..9d7bd1f6f7 100644 --- a/tests/app/email_branding/test_rest.py +++ b/tests/app/email_branding/test_rest.py @@ -4,8 +4,8 @@ from tests.app.db import create_email_branding -def test_get_email_branding_options(admin_request, notify_db, notify_db_session): - email_branding1 = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Org1") +def test_get_email_branding_options(admin_request, notify_db, notify_db_session, sample_organisation): + email_branding1 = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Org1", organisation_id=sample_organisation.id) email_branding2 = EmailBranding(colour="#000000", logo="/path/other.png", name="Org2") notify_db.session.add_all([email_branding1, email_branding2]) notify_db.session.commit() @@ -17,6 +17,8 @@ def test_get_email_branding_options(admin_request, notify_db, notify_db_session) str(email_branding1.id), str(email_branding2.id), } + assert email_branding[0]["organisation_id"] == str(sample_organisation.id) + assert email_branding[1]["organisation_id"] == "" def test_get_email_branding_by_id(admin_request, notify_db, notify_db_session): @@ -37,6 +39,7 @@ def test_get_email_branding_by_id(admin_request, notify_db, notify_db_session): "id", "text", "brand_type", + "organisation_id", } assert response["email_branding"]["colour"] == "#FFFFFF" assert response["email_branding"]["logo"] == "/path/image.png" From 4c0ce9a94ca62e39f0bd62c28357637306b34d7c Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Thu, 7 Mar 2024 11:49:31 -0500 Subject: [PATCH 17/33] get 
notification count faster (#2133) --- app/job/rest.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/app/job/rest.py b/app/job/rest.py index 00b55e4cde..214eaea866 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -136,7 +136,6 @@ def get_jobs_by_service(service_id): @job_blueprint.route("", methods=["POST"]) def create_job(service_id): service = dao_fetch_service_by_id(service_id) - current_app.logger.info(" TEMP LOGGING 1: done dao_fetch_service_by_id") if not service.active: raise InvalidRequest("Create job is not allowed: service is inactive ", 403) @@ -147,7 +146,6 @@ def create_job(service_id): data.update(**get_job_metadata_from_s3(service_id, data["id"])) except KeyError: raise InvalidRequest({"id": ["Missing data for required field."]}, status_code=400) - current_app.logger.info(" TEMP LOGGING 2: done data.update") if data.get("valid") != "True": raise InvalidRequest("File is not valid, can't create job", 400) @@ -155,21 +153,18 @@ def create_job(service_id): data["template"] = data.pop("template_id") template = dao_get_template_by_id(data["template"]) - current_app.logger.info(" TEMP LOGGING 3: done dao_get_template_by_id") template_errors = unarchived_template_schema.validate({"archived": template.archived}) if template_errors: raise InvalidRequest(template_errors, status_code=400) job = get_job_from_s3(service_id, data["id"]) - current_app.logger.info(" TEMP LOGGING 4: done get_job_from_s3") recipient_csv = RecipientCSV( job, template_type=template.template_type, placeholders=template._as_utils_template().placeholders, template=Template(template.__dict__), ) - current_app.logger.info(" TEMP LOGGING 5: done RecipientCSV()") if template.template_type == SMS_TYPE: # calculate the number of simulated recipients @@ -187,14 +182,13 @@ def create_job(service_id): increment_sms_daily_count_send_warnings_if_needed(service, len(recipient_csv)) elif template.template_type == EMAIL_TYPE: - check_email_daily_limit(service, len(recipient_csv)) - current_app.logger.info(" TEMP LOGGING 6a: done check_email_daily_limit") + notification_count = int(data.get("notification_count", len(recipient_csv))) + check_email_daily_limit(service, notification_count) scheduled_for = datetime.fromisoformat(data.get("scheduled_for")) if data.get("scheduled_for") else None if scheduled_for is None or not scheduled_for.date() > datetime.today().date(): - increment_email_daily_count_send_warnings_if_needed(service, len(recipient_csv)) - current_app.logger.info(" TEMP LOGGING 6: done checking limits") + increment_email_daily_count_send_warnings_if_needed(service, notification_count) data.update({"template_version": template.version}) @@ -204,11 +198,9 @@ def create_job(service_id): job.job_status = JOB_STATUS_SCHEDULED dao_create_job(job) - current_app.logger.info(" TEMP LOGGING 7: done dao_create_job") if job.job_status == JOB_STATUS_PENDING: process_job.apply_async([str(job.id)], queue=QueueNames.JOBS) - current_app.logger.info(" TEMP LOGGING 8: done process_job.apply_async") job_json = job_schema.dump(job) job_json["statistics"] = [] From 9117ec7dd81fe99def8a40f9ebbb35f1b29e5ec9 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Thu, 7 Mar 2024 14:04:03 -0500 Subject: [PATCH 18/33] Fix slow create_job for real this time (#2135) --- app/job/rest.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/app/job/rest.py b/app/job/rest.py index 214eaea866..8b29b73ccf 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -182,7 +182,14 @@ def 
create_job(service_id): increment_sms_daily_count_send_warnings_if_needed(service, len(recipient_csv)) elif template.template_type == EMAIL_TYPE: - notification_count = int(data.get("notification_count", len(recipient_csv))) + if "notification_count" in data: + notification_count = int(data["notification_count"]) + else: + current_app.logger.warning( + f"notification_count not in metadata for job {data['id']}, using len(recipient_csv) instead." + ) + notification_count = len(recipient_csv) + check_email_daily_limit(service, notification_count) scheduled_for = datetime.fromisoformat(data.get("scheduled_for")) if data.get("scheduled_for") else None From da146e2d211d2037cb8ec9e08d78f78eab7fbf0e Mon Sep 17 00:00:00 2001 From: Ben Larabie Date: Thu, 7 Mar 2024 14:54:16 -0500 Subject: [PATCH 19/33] K8s rollout workflow (#2134) Remotely launch k8s workflow --- .github/workflows/docker.yaml | 51 ++++++++--------------------------- 1 file changed, 11 insertions(+), 40 deletions(-) diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml index e4c5312d51..5fa4e2f343 100644 --- a/.github/workflows/docker.yaml +++ b/.github/workflows/docker.yaml @@ -9,6 +9,7 @@ env: DOCKER_ORG: public.ecr.aws/v6b8u5o6 DOCKER_SLUG: public.ecr.aws/v6b8u5o6/notify-api KUBECTL_VERSION: '1.23.6' + WORKFLOW_PAT: ${{ secrets.WORKFLOW_GITHUB_PAT }} permissions: id-token: write # This is required for requesting the OIDC JWT @@ -26,13 +27,6 @@ jobs: unzip -q awscliv2.zip sudo ./aws/install --update aws --version - - name: Install kubectl - run: | - curl -LO https://storage.googleapis.com/kubernetes-release/release/v$KUBECTL_VERSION/bin/linux/amd64/kubectl - chmod +x ./kubectl - sudo mv ./kubectl /usr/local/bin/kubectl - kubectl version --client - mkdir -p $HOME/.kube - name: Configure credentials to CDS public ECR using OIDC uses: aws-actions/configure-aws-credentials@master @@ -40,7 +34,7 @@ jobs: role-to-assume: arn:aws:iam::283582579564:role/notification-api-apply role-session-name: NotifyApiGitHubActions aws-region: "us-east-1" - + - name: Login to ECR id: login-ecr uses: aws-actions/amazon-ecr-login@5a88a04c91d5c6f97aae0d9be790e64d9b1d47b7 # v1.7.1 @@ -56,43 +50,19 @@ jobs: -t $DOCKER_SLUG:${GITHUB_SHA::7} \ -t $DOCKER_SLUG:latest \ -f ci/Dockerfile . 
+ - name: Publish run: | docker push $DOCKER_SLUG:latest && docker push $DOCKER_SLUG:${GITHUB_SHA::7} - - name: Configure credentials to Notify account using OIDC - uses: aws-actions/configure-aws-credentials@master - with: - role-to-assume: arn:aws:iam::239043911459:role/notification-api-apply - role-session-name: NotifyApiGitHubActions - aws-region: "ca-central-1" - - - name: Get Kubernetes configuration - run: | - aws eks --region $AWS_REGION update-kubeconfig --name notification-canada-ca-staging-eks-cluster --kubeconfig $HOME/.kube/config - - name: Update images in staging + - name: Rollout in Kubernetes run: | - kubectl set image deployment.apps/api api=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-beat celery-beat=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-sms celery-sms=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-primary celery-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-scalable celery-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-sms-send-primary celery-sms-send-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-sms-send-scalable celery-sms-send-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-email-send-primary celery-email-send-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-email-send-scalable celery-email-send-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - - - name: Restart deployments in staging - run: | - kubectl rollout restart deployment/api -n notification-canada-ca - kubectl rollout restart deployment/celery-beat -n notification-canada-ca - kubectl rollout restart deployment/celery-sms -n notification-canada-ca - kubectl rollout restart deployment/celery-primary -n notification-canada-ca - kubectl rollout restart deployment/celery-scalable -n notification-canada-ca - kubectl rollout restart deployment/celery-sms-send-primary -n notification-canada-ca - kubectl rollout restart deployment/celery-sms-send-scalable -n notification-canada-ca - kubectl rollout restart deployment/celery-email-send-primary -n notification-canada-ca - kubectl rollout restart deployment/celery-email-send-scalable -n notification-canada-ca + PAYLOAD={\"ref\":\"main\",\"inputs\":{\"docker_sha\":\"${GITHUB_SHA::7}\"}} + curl -L -X POST -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $WORKFLOW_PAT" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/cds-snc/notification-manifests/actions/workflows/api-rollout-k8s-staging.yaml/dispatches \ + -d $PAYLOAD - name: my-app-install token id: notify-pr-bot @@ -118,3 +88,4 @@ jobs: run: | json="{'text':' CI is failing in !'}" curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_WEBHOOK }} + From ad0546694ead9e477397fc1fb3a6e4934aeeac86 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Thu, 7 Mar 2024 15:33:31 -0500 Subject: 
[PATCH 20/33] Fix/restrict job query size (#2127) --- app/celery/nightly_tasks.py | 17 +++++++++-------- app/dao/jobs_dao.py | 15 +++++++++------ tests/app/dao/test_jobs_dao.py | 33 +++++++++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 14 deletions(-) diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index c69e32018a..61a358fecc 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -49,14 +49,15 @@ def remove_letter_csv_files(): def _remove_csv_files(job_types): - jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types) - current_app.logger.info("TEMP LOGGING: trying to remove {} jobs.".format(len(jobs))) - for job in jobs: - current_app.logger.info("TEMP LOGGING: trying to remove Job ID {} from s3.".format(job.id)) - s3.remove_job_from_s3(job.service_id, job.id) - current_app.logger.info("TEMP LOGGING: trying to archive Job ID {}".format(job.id)) - dao_archive_job(job) - current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) + while True: + jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=20000) + if len(jobs) == 0: + break + current_app.logger.info("Archiving {} jobs.".format(len(jobs))) + for job in jobs: + s3.remove_job_from_s3(job.service_id, job.id) + dao_archive_job(job) + current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) @notify_celery.task(name="delete-sms-notifications") diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 3b945b15fc..ec3b80f1ae 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -129,7 +129,7 @@ def dao_update_job(job): db.session.commit() -def dao_get_jobs_older_than_data_retention(notification_types): +def dao_get_jobs_older_than_data_retention(notification_types, limit=None): flexible_data_retention = ServiceDataRetention.query.filter( ServiceDataRetention.notification_type.in_(notification_types) ).all() @@ -137,8 +137,7 @@ def dao_get_jobs_older_than_data_retention(notification_types): today = datetime.utcnow().date() for f in flexible_data_retention: end_date = today - timedelta(days=f.days_of_retention) - - jobs.extend( + query = ( Job.query.join(Template) .filter( func.coalesce(Job.scheduled_for, Job.created_at) < end_date, @@ -147,13 +146,15 @@ def dao_get_jobs_older_than_data_retention(notification_types): Job.service_id == f.service_id, ) .order_by(desc(Job.created_at)) - .all() ) + if limit: + query = query.limit(limit) + jobs.extend(query.all()) end_date = today - timedelta(days=7) for notification_type in notification_types: services_with_data_retention = [x.service_id for x in flexible_data_retention if x.notification_type == notification_type] - jobs.extend( + query = ( Job.query.join(Template) .filter( func.coalesce(Job.scheduled_for, Job.created_at) < end_date, @@ -162,8 +163,10 @@ def dao_get_jobs_older_than_data_retention(notification_types): Job.service_id.notin_(services_with_data_retention), ) .order_by(desc(Job.created_at)) - .all() ) + if limit: + query = query.limit(limit - len(jobs)) + jobs.extend(query.all()) return jobs diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index 29e1c001f0..ebb8f50f01 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -17,6 +17,7 @@ dao_set_scheduled_jobs_to_pending, dao_update_job, ) +from app.dao.service_data_retention_dao import insert_service_data_retention from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE, Job from tests.app.db import ( create_job, @@ 
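# Caveat on the limit handling above: each pass through the
# flexible_data_retention loop caps its own query at `limit`, so when several
# services have custom retention the combined list can exceed the requested
# limit; only the second (default seven-day) loop subtracts what was already
# collected. The arithmetic that a later patch in this series applies to both
# loops, shown with illustrative counts:
limit = 3
already_collected = 2
remaining_cap = limit - already_collected  # each subsequent query should take at most this
assert remaining_cap == 1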
-348,6 +349,38 @@ def test_should_get_jobs_seven_days_old_by_scheduled_for_date(sample_service): assert job_to_remain.id not in [job.id for job in jobs] +@freeze_time("2016-10-31 10:00:00") +def test_should_get_limited_number_of_jobs(sample_template): + flexable_retention_service = create_service(service_name="Another service") + insert_service_data_retention(flexable_retention_service.id, sample_template.template_type, 3) + flexable_template = create_template(flexable_retention_service, template_type=sample_template.template_type) + + eight_days_ago = datetime.utcnow() - timedelta(days=8) + four_days_ago = datetime.utcnow() - timedelta(days=4) + + create_job(flexable_template, created_at=four_days_ago) + create_job(flexable_template, created_at=four_days_ago) + create_job(sample_template, created_at=eight_days_ago) + create_job(sample_template, created_at=eight_days_ago) + + jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type], limit=3) + + assert len(jobs) == 3 + + +@freeze_time("2016-10-31 10:00:00") +def test_should_get_not_get_limited_number_of_jobs_by_default(sample_template): + eight_days_ago = datetime.utcnow() - timedelta(days=8) + + create_job(sample_template, created_at=eight_days_ago) + create_job(sample_template, created_at=eight_days_ago) + create_job(sample_template, created_at=eight_days_ago) + + jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type]) + + assert len(jobs) == 3 + + def assert_job_stat(job, result, sent, delivered, failed): assert result.job_id == job.id assert result.original_file_name == job.original_file_name From 00c44e33e565d5d6e54b76e264e861d78f16c9a0 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Mon, 11 Mar 2024 11:54:47 -0400 Subject: [PATCH 21/33] upgrade utils to 52.1.5 (#2131) --- poetry.lock | 22 +++++++++++----------- pyproject.toml | 2 +- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/poetry.lock b/poetry.lock index cd2b5ca1ba..1ab0346e40 100644 --- a/poetry.lock +++ b/poetry.lock @@ -331,13 +331,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "bleach" -version = "6.0.0" +version = "6.1.0" description = "An easy safelist-based HTML-sanitizing tool." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"}, - {file = "bleach-6.0.0.tar.gz", hash = "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414"}, + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, ] [package.dependencies] @@ -345,7 +345,7 @@ six = ">=1.9.0" webencodings = "*" [package.extras] -css = ["tinycss2 (>=1.1.0,<1.2)"] +css = ["tinycss2 (>=1.1.0,<1.3)"] [[package]] name = "blinker" @@ -2444,16 +2444,16 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "52.1.3" +version = "52.1.5" description = "Shared python code for Notification - Provides logging utils etc." 
optional = false -python-versions = "~3.10" +python-versions = "~3.10.9" files = [] develop = false [package.dependencies] awscli = "1.32.25" -bleach = "6.0.0" +bleach = "6.1.0" boto3 = "1.34.25" cachetools = "4.2.4" certifi = "^2023.7.22" @@ -2479,8 +2479,8 @@ werkzeug = "2.3.7" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "52.1.3" -resolved_reference = "06a40db6286f525fe3551e029418458d33342592" +reference = "52.1.5" +resolved_reference = "9d9e8c7c32e3608f4dd8f320eaba4bb67edfcbf5" [[package]] name = "ordered-set" @@ -4255,4 +4255,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "f2bf5c58fe6d2689072e7b9d4cf91976e07e76ade98dc3153977c4377b98c86e" +content-hash = "f00992b7f47d8434a76d0be08135eace31315c696e20a222a10e2bf926e8a561" diff --git a/pyproject.toml b/pyproject.toml index 609ff16d41..db3ae6fff3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ Werkzeug = "2.3.7" MarkupSafe = "2.1.4" # REVIEW: v2 is using sha512 instead of sha1 by default (in v1) itsdangerous = "2.1.2" -notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.1.3" } +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.1.5" } # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 typing-extensions = "4.7.1" greenlet = "2.0.2" From 71a1268cda16f8f3069f80d9c47454bfb04879a8 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Tue, 12 Mar 2024 16:28:39 -0400 Subject: [PATCH 22/33] Batch up job archiving code a bit (#2139) --- app/aws/s3.py | 6 ++++-- app/celery/nightly_tasks.py | 14 +++++++------ app/dao/jobs_dao.py | 7 ++++--- tests/app/celery/test_nightly_tasks.py | 27 +++++++++----------------- 4 files changed, 25 insertions(+), 29 deletions(-) diff --git a/app/aws/s3.py b/app/aws/s3.py index 524eb876a9..39595ee4af 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -60,8 +60,10 @@ def get_job_metadata_from_s3(service_id, job_id): return obj.get()["Metadata"] -def remove_job_from_s3(service_id, job_id): - return remove_s3_object(*get_job_location(service_id, job_id)) +def remove_job_batch_from_s3(jobs): + bucket = resource("s3").Bucket(current_app.config["CSV_UPLOAD_BUCKET_NAME"]) + object_keys = [FILE_LOCATION_STRUCTURE.format(job.service_id, job.id) for job in jobs] + bucket.delete_objects(Delete={"Objects": [{"Key": key} for key in object_keys]}) def get_s3_bucket_objects(bucket_name, subfolder="", older_than=7, limit_days=2): diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index 61a358fecc..07af798dd6 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -12,7 +12,10 @@ from app.config import QueueNames from app.cronitor import cronitor from app.dao.inbound_sms_dao import delete_inbound_sms_older_than_retention -from app.dao.jobs_dao import dao_archive_job, dao_get_jobs_older_than_data_retention +from app.dao.jobs_dao import ( + dao_archive_job_batch, + dao_get_jobs_older_than_data_retention, +) from app.dao.notifications_dao import ( dao_timeout_notifications, delete_notifications_older_than_retention_by_type, @@ -50,14 +53,13 @@ def remove_letter_csv_files(): def _remove_csv_files(job_types): while True: - jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=20000) + jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=100) if len(jobs) == 0: break current_app.logger.info("Archiving {} 
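# Context for the batch delete above: S3's DeleteObjects API accepts at most
# 1000 keys per request, so an unbounded jobs list needs chunking; the
# follow-up patch in this series re-lands the change with an explicit
# batch_size, plausibly for that reason. A minimal chunking sketch:
def chunks(seq, size=1000):
    for start in range(0, len(seq), size):
        yield seq[start : start + size]

# usage sketch:
# for batch in chunks(object_keys):
#     bucket.delete_objects(Delete={"Objects": [{"Key": k} for k in batch]})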
jobs.".format(len(jobs))) - for job in jobs: - s3.remove_job_from_s3(job.service_id, job.id) - dao_archive_job(job) - current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) + s3.remove_job_batch_from_s3(jobs) + dao_archive_job_batch(jobs) + current_app.logger.info(f"Jobs archived: {[job.id for job in jobs]}") @notify_celery.task(name="delete-sms-notifications") diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index ec3b80f1ae..731fa8094b 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -71,9 +71,10 @@ def dao_get_job_by_id(job_id) -> Job: return Job.query.filter_by(id=job_id).one() -def dao_archive_job(job): - job.archived = True - db.session.add(job) +def dao_archive_job_batch(jobs): + for job in jobs: + job.archived = True + db.session.add(job) db.session.commit() diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py index 4cdc277db9..d0331cea9b 100644 --- a/tests/app/celery/test_nightly_tasks.py +++ b/tests/app/celery/test_nightly_tasks.py @@ -76,7 +76,7 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_ """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_job_batch_from_s3") seven_days_ago = datetime.utcnow() - timedelta(days=7) just_under_seven_days = seven_days_ago + timedelta(seconds=1) @@ -93,10 +93,8 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_ remove_sms_email_csv_files() - assert s3.remove_job_from_s3.call_args_list == [ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - ] + args = s3.remove_job_batch_from_s3.call_args.args[0] + assert sorted(args, key=lambda x: x.id) == sorted([job1_to_delete, job2_to_delete], key=lambda x: x.id) assert job1_to_delete.archived is True assert dont_delete_me_1.archived is False @@ -106,7 +104,7 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, n """ Jobs older than retention period are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_job_batch_from_s3") service_1 = create_service(service_name="service 1") service_2 = create_service(service_name="service 2") create_service_data_retention(service=service_1, notification_type=SMS_TYPE, days_of_retention=3) @@ -131,20 +129,15 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, n remove_sms_email_csv_files() - s3.remove_job_from_s3.assert_has_calls( - [ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - call(job3_to_delete.service_id, job3_to_delete.id), - call(job4_to_delete.service_id, job4_to_delete.id), - ], - any_order=True, + args = s3.remove_job_batch_from_s3.call_args.args[0] + assert sorted(args, key=lambda x: x.id) == sorted( + [job1_to_delete, job2_to_delete, job3_to_delete, job4_to_delete], key=lambda x: x.id ) @freeze_time("2017-01-01 10:00:00") def test_remove_csv_files_filters_by_type(mocker, sample_service): - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_job_batch_from_s3") """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ @@ -158,9 +151,7 @@ def 
test_remove_csv_files_filters_by_type(mocker, sample_service): remove_letter_csv_files() - assert s3.remove_job_from_s3.call_args_list == [ - call(job_to_delete.service_id, job_to_delete.id), - ] + assert s3.remove_job_batch_from_s3.call_args.args[0] == [job_to_delete] def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker): From 7f0166ce62264ab7b88f396b07b99269e841b8b0 Mon Sep 17 00:00:00 2001 From: Guillaume Charest <1690085+gcharest@users.noreply.github.com> Date: Fri, 15 Mar 2024 10:55:28 -0400 Subject: [PATCH 23/33] chore: add base catalog info (#2140) --- catalog-info.yaml | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/catalog-info.yaml b/catalog-info.yaml index ca14ec9b7f..5d62d960c8 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -1,11 +1,18 @@ +# Metadata for the backstage catalog accessible at this link: +# https://backstage.cdssandbox.xyz/ +--- apiVersion: backstage.io/v1alpha1 kind: Component metadata: - name: notification-api - description: GC Notify API | GC Notification API + name: notification-api-service + title: GC Notify API | GC Notification API + description: REST API service for GC Notification + annotations: + github.com/project-slug: cds-snc/notification-api labels: license: MIT spec: - type: website - lifecycle: experimental - owner: cds-snc + type: service + lifecycle: production + owner: group:cds-snc/notify-dev + system: gc-notification From 604ec61e5c835b143d0b6647ca510e02e5ee07a5 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Tue, 19 Mar 2024 10:02:21 -0400 Subject: [PATCH 24/33] Revert "Batch up job archiving code a bit (#2139)" (#2142) This reverts commit 71a1268cda16f8f3069f80d9c47454bfb04879a8. --- app/aws/s3.py | 6 ++---- app/celery/nightly_tasks.py | 14 ++++++------- app/dao/jobs_dao.py | 7 +++---- tests/app/celery/test_nightly_tasks.py | 27 +++++++++++++++++--------- 4 files changed, 29 insertions(+), 25 deletions(-) diff --git a/app/aws/s3.py b/app/aws/s3.py index 39595ee4af..524eb876a9 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -60,10 +60,8 @@ def get_job_metadata_from_s3(service_id, job_id): return obj.get()["Metadata"] -def remove_job_batch_from_s3(jobs): - bucket = resource("s3").Bucket(current_app.config["CSV_UPLOAD_BUCKET_NAME"]) - object_keys = [FILE_LOCATION_STRUCTURE.format(job.service_id, job.id) for job in jobs] - bucket.delete_objects(Delete={"Objects": [{"Key": key} for key in object_keys]}) +def remove_job_from_s3(service_id, job_id): + return remove_s3_object(*get_job_location(service_id, job_id)) def get_s3_bucket_objects(bucket_name, subfolder="", older_than=7, limit_days=2): diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index 07af798dd6..61a358fecc 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -12,10 +12,7 @@ from app.config import QueueNames from app.cronitor import cronitor from app.dao.inbound_sms_dao import delete_inbound_sms_older_than_retention -from app.dao.jobs_dao import ( - dao_archive_job_batch, - dao_get_jobs_older_than_data_retention, -) +from app.dao.jobs_dao import dao_archive_job, dao_get_jobs_older_than_data_retention from app.dao.notifications_dao import ( dao_timeout_notifications, delete_notifications_older_than_retention_by_type, @@ -53,13 +50,14 @@ def remove_letter_csv_files(): def _remove_csv_files(job_types): while True: - jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=100) + jobs = 
dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=20000) if len(jobs) == 0: break current_app.logger.info("Archiving {} jobs.".format(len(jobs))) - s3.remove_job_batch_from_s3(jobs) - dao_archive_job_batch(jobs) - current_app.logger.info(f"Jobs archived: {[job.id for job in jobs]}") + for job in jobs: + s3.remove_job_from_s3(job.service_id, job.id) + dao_archive_job(job) + current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) @notify_celery.task(name="delete-sms-notifications") diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 731fa8094b..ec3b80f1ae 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -71,10 +71,9 @@ def dao_get_job_by_id(job_id) -> Job: return Job.query.filter_by(id=job_id).one() -def dao_archive_job_batch(jobs): - for job in jobs: - job.archived = True - db.session.add(job) +def dao_archive_job(job): + job.archived = True + db.session.add(job) db.session.commit() diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py index d0331cea9b..4cdc277db9 100644 --- a/tests/app/celery/test_nightly_tasks.py +++ b/tests/app/celery/test_nightly_tasks.py @@ -76,7 +76,7 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_ """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_batch_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") seven_days_ago = datetime.utcnow() - timedelta(days=7) just_under_seven_days = seven_days_ago + timedelta(seconds=1) @@ -93,8 +93,10 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_ remove_sms_email_csv_files() - args = s3.remove_job_batch_from_s3.call_args.args[0] - assert sorted(args, key=lambda x: x.id) == sorted([job1_to_delete, job2_to_delete], key=lambda x: x.id) + assert s3.remove_job_from_s3.call_args_list == [ + call(job1_to_delete.service_id, job1_to_delete.id), + call(job2_to_delete.service_id, job2_to_delete.id), + ] assert job1_to_delete.archived is True assert dont_delete_me_1.archived is False @@ -104,7 +106,7 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, n """ Jobs older than retention period are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_batch_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") service_1 = create_service(service_name="service 1") service_2 = create_service(service_name="service 2") create_service_data_retention(service=service_1, notification_type=SMS_TYPE, days_of_retention=3) @@ -129,15 +131,20 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, n remove_sms_email_csv_files() - args = s3.remove_job_batch_from_s3.call_args.args[0] - assert sorted(args, key=lambda x: x.id) == sorted( - [job1_to_delete, job2_to_delete, job3_to_delete, job4_to_delete], key=lambda x: x.id + s3.remove_job_from_s3.assert_has_calls( + [ + call(job1_to_delete.service_id, job1_to_delete.id), + call(job2_to_delete.service_id, job2_to_delete.id), + call(job3_to_delete.service_id, job3_to_delete.id), + call(job4_to_delete.service_id, job4_to_delete.id), + ], + any_order=True, ) @freeze_time("2017-01-01 10:00:00") def test_remove_csv_files_filters_by_type(mocker, sample_service): - mocker.patch("app.celery.nightly_tasks.s3.remove_job_batch_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") """ 
Jobs older than seven days are deleted, but only two day's worth (two-day window) """ @@ -151,7 +158,9 @@ def test_remove_csv_files_filters_by_type(mocker, sample_service): remove_letter_csv_files() - assert s3.remove_job_batch_from_s3.call_args.args[0] == [job_to_delete] + assert s3.remove_job_from_s3.call_args_list == [ + call(job_to_delete.service_id, job_to_delete.id), + ] def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker): From 1d576ce0da5e5162dafecf39382bb68b8e2efcdf Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Wed, 20 Mar 2024 15:32:10 -0400 Subject: [PATCH 25/33] Batch up job archiving - not crashy this time (#2143) --- app/aws/s3.py | 19 ++++++++++-- app/celery/nightly_tasks.py | 33 +++++++++++++------- app/dao/jobs_dao.py | 15 ++++++--- tests/app/aws/test_s3.py | 30 +++++++++++++++++- tests/app/celery/test_nightly_tasks.py | 43 ++++++++++---------------- tests/app/dao/test_jobs_dao.py | 18 ++++++----- 6 files changed, 107 insertions(+), 51 deletions(-) diff --git a/app/aws/s3.py b/app/aws/s3.py index 524eb876a9..9ebeb6d137 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -1,5 +1,6 @@ import uuid from datetime import datetime, timedelta +from typing import List import botocore import pytz @@ -7,6 +8,8 @@ from flask import current_app from notifications_utils.s3 import s3upload as utils_s3upload +from app.models import Job + FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv" @@ -60,8 +63,20 @@ def get_job_metadata_from_s3(service_id, job_id): return obj.get()["Metadata"] -def remove_job_from_s3(service_id, job_id): - return remove_s3_object(*get_job_location(service_id, job_id)) +def remove_jobs_from_s3(jobs: List[Job], batch_size=1000): + """ + Remove the files from S3 for the given jobs. + + Args: + jobs (List[Job]): The jobs whose files need to be removed from S3. + batch_size (int, optional): The number of jobs to process in each boto call. Defaults to the AWS maximum of 1000. + """ + + bucket = resource("s3").Bucket(current_app.config["CSV_UPLOAD_BUCKET_NAME"]) + + for start in range(0, len(jobs), batch_size): + object_keys = [FILE_LOCATION_STRUCTURE.format(job.service_id, job.id) for job in jobs[start : start + batch_size]] + bucket.delete_objects(Delete={"Objects": [{"Key": key} for key in object_keys]}) def get_s3_bucket_objects(bucket_name, subfolder="", older_than=7, limit_days=2): diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index 61a358fecc..4c3e5832d1 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -1,4 +1,5 @@ from datetime import datetime, timedelta +from typing import List import pytz from flask import current_app @@ -12,7 +13,7 @@ from app.config import QueueNames from app.cronitor import cronitor from app.dao.inbound_sms_dao import delete_inbound_sms_older_than_retention -from app.dao.jobs_dao import dao_archive_job, dao_get_jobs_older_than_data_retention +from app.dao.jobs_dao import dao_archive_jobs, dao_get_jobs_older_than_data_retention from app.dao.notifications_dao import ( dao_timeout_notifications, delete_notifications_older_than_retention_by_type, @@ -37,27 +38,37 @@ @notify_celery.task(name="remove_sms_email_jobs") @cronitor("remove_sms_email_jobs") @statsd(namespace="tasks") -def remove_sms_email_csv_files(): - _remove_csv_files([EMAIL_TYPE, SMS_TYPE]) +def remove_sms_email_jobs(): + """ + Remove csv files from s3 and archive email and sms jobs older than data retention period. 
+ """ + + _archive_jobs([EMAIL_TYPE, SMS_TYPE]) @notify_celery.task(name="remove_letter_jobs") @cronitor("remove_letter_jobs") @statsd(namespace="tasks") -def remove_letter_csv_files(): - _remove_csv_files([LETTER_TYPE]) +def remove_letter_jobs(): + _archive_jobs([LETTER_TYPE]) + + +def _archive_jobs(job_types: List[str]): + """ + Remove csv files from s3 and archive jobs older than data retention period. + Args: + job_types (List[str]): list of job types to remove csv files and archive jobs for + """ -def _remove_csv_files(job_types): while True: - jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=20000) + jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=100) if len(jobs) == 0: break current_app.logger.info("Archiving {} jobs.".format(len(jobs))) - for job in jobs: - s3.remove_job_from_s3(job.service_id, job.id) - dao_archive_job(job) - current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) + s3.remove_jobs_from_s3(jobs) + dao_archive_jobs(jobs) + current_app.logger.info(f"Jobs archived: {[job.id for job in jobs]}") @notify_celery.task(name="delete-sms-notifications") diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index ec3b80f1ae..28a8b1f15d 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -1,5 +1,6 @@ import uuid from datetime import datetime, timedelta +from typing import Iterable from flask import current_app from notifications_utils.letter_timings import ( @@ -71,9 +72,15 @@ def dao_get_job_by_id(job_id) -> Job: return Job.query.filter_by(id=job_id).one() -def dao_archive_job(job): - job.archived = True - db.session.add(job) +def dao_archive_jobs(jobs: Iterable[Job]): + """ + Archive the given jobs. + Args: + jobs (Iterable[Job]): The jobs to archive. 
+ """ + for job in jobs: + job.archived = True + db.session.add(job) db.session.commit() @@ -148,7 +155,7 @@ def dao_get_jobs_older_than_data_retention(notification_types, limit=None): .order_by(desc(Job.created_at)) ) if limit: - query = query.limit(limit) + query = query.limit(limit - len(jobs)) jobs.extend(query.all()) end_date = today - timedelta(days=7) diff --git a/tests/app/aws/test_s3.py b/tests/app/aws/test_s3.py index bae56c3f45..02de33cbba 100644 --- a/tests/app/aws/test_s3.py +++ b/tests/app/aws/test_s3.py @@ -1,6 +1,6 @@ import uuid from datetime import datetime, timedelta -from unittest.mock import call +from unittest.mock import Mock, call import pytest import pytz @@ -12,6 +12,7 @@ get_list_of_files_by_suffix, get_s3_bucket_objects, get_s3_file, + remove_jobs_from_s3, remove_transformed_dvla_file, upload_job_to_s3, ) @@ -214,3 +215,30 @@ def test_upload_job_to_s3(notify_api, mocker): bucket_name=current_app.config["CSV_UPLOAD_BUCKET_NAME"], file_location=f"service-{service_id}-notify/{upload_id}.csv", ) + + +def test_remove_jobs_from_s3(notify_api, mocker): + mock = Mock() + mocker.patch("app.aws.s3.resource", return_value=mock) + jobs = [ + type("Job", (object,), {"service_id": "foo", "id": "j1"}), + type("Job", (object,), {"service_id": "foo", "id": "j2"}), + type("Job", (object,), {"service_id": "foo", "id": "j3"}), + type("Job", (object,), {"service_id": "foo", "id": "j4"}), + type("Job", (object,), {"service_id": "foo", "id": "j5"}), + ] + + remove_jobs_from_s3(jobs, batch_size=2) + + mock.assert_has_calls( + [ + call.Bucket(current_app.config["CSV_UPLOAD_BUCKET_NAME"]), + call.Bucket().delete_objects( + Delete={"Objects": [{"Key": "service-foo-notify/j1.csv"}, {"Key": "service-foo-notify/j2.csv"}]} + ), + call.Bucket().delete_objects( + Delete={"Objects": [{"Key": "service-foo-notify/j3.csv"}, {"Key": "service-foo-notify/j4.csv"}]} + ), + call.Bucket().delete_objects(Delete={"Objects": [{"Key": "service-foo-notify/j5.csv"}]}), + ] + ) diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py index 4cdc277db9..7de3d47b74 100644 --- a/tests/app/celery/test_nightly_tasks.py +++ b/tests/app/celery/test_nightly_tasks.py @@ -17,8 +17,8 @@ delete_sms_notifications_older_than_retention, letter_raise_alert_if_no_ack_file_for_zip, raise_alert_if_letter_notifications_still_sending, - remove_letter_csv_files, - remove_sms_email_csv_files, + remove_letter_jobs, + remove_sms_email_jobs, remove_transformed_dvla_files, s3, send_daily_performance_platform_stats, @@ -72,11 +72,11 @@ def mock_s3_get_list_diff(bucket_name, subfolder="", suffix="", last_modified=No @freeze_time("2016-10-18T10:00:00") -def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_db_session, mocker, sample_template): +def test_will_archive_jobs_older_than_seven_days(notify_db, notify_db_session, mocker, sample_template): """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_jobs_from_s3") seven_days_ago = datetime.utcnow() - timedelta(days=7) just_under_seven_days = seven_days_ago + timedelta(seconds=1) @@ -91,22 +91,20 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_ dont_delete_me_1 = create_job(sample_template, created_at=seven_days_ago) create_job(sample_template, created_at=just_under_seven_days) - remove_sms_email_csv_files() + remove_sms_email_jobs() - assert 
s3.remove_job_from_s3.call_args_list == [ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - ] + args = s3.remove_jobs_from_s3.call_args.args[0] + assert sorted(args, key=lambda x: x.id) == sorted([job1_to_delete, job2_to_delete], key=lambda x: x.id) assert job1_to_delete.archived is True assert dont_delete_me_1.archived is False @freeze_time("2016-10-18T10:00:00") -def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, notify_db_session, mocker): +def test_will_archive_jobs_older_than_retention_period(notify_db, notify_db_session, mocker): """ Jobs older than retention period are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_jobs_from_s3") service_1 = create_service(service_name="service 1") service_2 = create_service(service_name="service 2") create_service_data_retention(service=service_1, notification_type=SMS_TYPE, days_of_retention=3) @@ -129,22 +127,17 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, n job3_to_delete = create_job(email_template_service_2, created_at=thirty_one_days_ago) job4_to_delete = create_job(sms_template_service_2, created_at=eight_days_ago) - remove_sms_email_csv_files() + remove_sms_email_jobs() - s3.remove_job_from_s3.assert_has_calls( - [ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - call(job3_to_delete.service_id, job3_to_delete.id), - call(job4_to_delete.service_id, job4_to_delete.id), - ], - any_order=True, + args = s3.remove_jobs_from_s3.call_args.args[0] + assert sorted(args, key=lambda x: x.id) == sorted( + [job1_to_delete, job2_to_delete, job3_to_delete, job4_to_delete], key=lambda x: x.id ) @freeze_time("2017-01-01 10:00:00") -def test_remove_csv_files_filters_by_type(mocker, sample_service): - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") +def test_archive_jobs_by_type(mocker, sample_service): + mocker.patch("app.celery.nightly_tasks.s3.remove_jobs_from_s3") """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ @@ -156,11 +149,9 @@ def test_remove_csv_files_filters_by_type(mocker, sample_service): job_to_delete = create_job(template=letter_template, created_at=eight_days_ago) create_job(template=sms_template, created_at=eight_days_ago) - remove_letter_csv_files() + remove_letter_jobs() - assert s3.remove_job_from_s3.call_args_list == [ - call(job_to_delete.service_id, job_to_delete.id), - ] + assert s3.remove_jobs_from_s3.call_args.args[0] == [job_to_delete] def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker): diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index ebb8f50f01..58e3007739 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -351,17 +351,21 @@ def test_should_get_jobs_seven_days_old_by_scheduled_for_date(sample_service): @freeze_time("2016-10-31 10:00:00") def test_should_get_limited_number_of_jobs(sample_template): - flexable_retention_service = create_service(service_name="Another service") - insert_service_data_retention(flexable_retention_service.id, sample_template.template_type, 3) - flexable_template = create_template(flexable_retention_service, template_type=sample_template.template_type) + flexible_retention_service1 = create_service(service_name="Another service 1") + 
insert_service_data_retention(flexible_retention_service1.id, sample_template.template_type, 3) + flexible_template1 = create_template(flexible_retention_service1, template_type=sample_template.template_type) + + flexible_retention_service2 = create_service(service_name="Another service 2") + insert_service_data_retention(flexible_retention_service2.id, sample_template.template_type, 2) + flexible_template2 = create_template(flexible_retention_service2, template_type=sample_template.template_type) eight_days_ago = datetime.utcnow() - timedelta(days=8) four_days_ago = datetime.utcnow() - timedelta(days=4) - create_job(flexable_template, created_at=four_days_ago) - create_job(flexable_template, created_at=four_days_ago) - create_job(sample_template, created_at=eight_days_ago) - create_job(sample_template, created_at=eight_days_ago) + for _ in range(4): + create_job(flexible_template1, created_at=four_days_ago) + create_job(flexible_template2, created_at=four_days_ago) + create_job(sample_template, created_at=eight_days_ago) jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type], limit=3) From da922e1e26dd9f01120ee3546aa1a1fc3e8144ef Mon Sep 17 00:00:00 2001 From: Jumana B Date: Thu, 21 Mar 2024 11:11:21 -0400 Subject: [PATCH 26/33] Add filtering with an organisation id (#2137) * Add filtering with an organisation id * Update tests/app/email_branding/test_rest.py Co-authored-by: Andrew * test --------- Co-authored-by: Andrew --- app/dao/email_branding_dao.py | 4 +++- app/email_branding/rest.py | 5 ++++- tests/app/dao/test_email_branding_dao.py | 12 ++++++++++-- tests/app/db.py | 4 +++- tests/app/email_branding/test_rest.py | 17 +++++++++++++++++ 5 files changed, 37 insertions(+), 5 deletions(-) diff --git a/app/dao/email_branding_dao.py b/app/dao/email_branding_dao.py index d8738e9200..1ed90ae1e6 100644 --- a/app/dao/email_branding_dao.py +++ b/app/dao/email_branding_dao.py @@ -3,7 +3,9 @@ from app.models import EmailBranding -def dao_get_email_branding_options(): +def dao_get_email_branding_options(filter_by_organisation_id=None): + if filter_by_organisation_id: + return EmailBranding.query.filter_by(organisation_id=filter_by_organisation_id).all() return EmailBranding.query.all() diff --git a/app/email_branding/rest.py b/app/email_branding/rest.py index 3dc5086148..6ae95745be 100644 --- a/app/email_branding/rest.py +++ b/app/email_branding/rest.py @@ -20,7 +20,10 @@ @email_branding_blueprint.route("", methods=["GET"]) def get_email_branding_options(): - email_branding_options = [o.serialize() for o in dao_get_email_branding_options()] + filter_by_organisation_id = request.args.get("organisation_id", None) + email_branding_options = [ + o.serialize() for o in dao_get_email_branding_options(filter_by_organisation_id=filter_by_organisation_id) + ] return jsonify(email_branding=email_branding_options) diff --git a/tests/app/dao/test_email_branding_dao.py b/tests/app/dao/test_email_branding_dao.py index a69c912577..a3bc948a34 100644 --- a/tests/app/dao/test_email_branding_dao.py +++ b/tests/app/dao/test_email_branding_dao.py @@ -5,11 +5,12 @@ dao_update_email_branding, ) from app.models import EmailBranding -from tests.app.db import create_email_branding +from tests.app.db import create_email_branding, create_organisation def test_get_email_branding_options_gets_all_email_branding(notify_db, notify_db_session): - email_branding_1 = create_email_branding(name="test_email_branding_1") + org_1 = create_organisation() + email_branding_1 = 
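# Design note on the organisation filter above: the truthiness check means
# organisation_id="" (or None) falls through to "return everything", so
# callers cannot ask specifically for brandings whose organisation_id IS
# NULL. If that distinction were ever needed, an explicit sentinel would do;
# the variant below is hypothetical and not part of this change:
from app.models import EmailBranding

_UNSET = object()

def dao_get_email_branding_options_explicit(organisation_id=_UNSET):
    if organisation_id is _UNSET:
        return EmailBranding.query.all()
    # here None genuinely selects the unassigned brandings
    return EmailBranding.query.filter_by(organisation_id=organisation_id).all()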
create_email_branding(name="test_email_branding_1", organisation_id=org_1.id) email_branding_2 = create_email_branding(name="test_email_branding_2") email_branding = dao_get_email_branding_options() @@ -18,6 +19,13 @@ def test_get_email_branding_options_gets_all_email_branding(notify_db, notify_db assert email_branding_1 == email_branding[0] assert email_branding_2 == email_branding[1] + org_1_id = email_branding_1.organisation_id + + email_branding = dao_get_email_branding_options(filter_by_organisation_id=org_1_id) + assert len(email_branding) == 1 + assert email_branding_1 == email_branding[0] + assert email_branding[0].organisation_id == org_1_id + def test_get_email_branding_by_id_gets_correct_email_branding(notify_db, notify_db_session): email_branding = create_email_branding() diff --git a/tests/app/db.py b/tests/app/db.py index 1dacec37bc..c9ff33427c 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -502,13 +502,15 @@ def create_service_callback_api( return service_callback_api -def create_email_branding(colour="blue", logo="test_x2.png", name="test_org_1", text="DisplayName"): +def create_email_branding(colour="blue", logo="test_x2.png", name="test_org_1", text="DisplayName", organisation_id=None): data = { "colour": colour, "logo": logo, "name": name, "text": text, } + if organisation_id: + data["organisation_id"] = organisation_id email_branding = EmailBranding(**data) dao_create_email_branding(email_branding) diff --git a/tests/app/email_branding/test_rest.py b/tests/app/email_branding/test_rest.py index 9d7bd1f6f7..05e0b5a48e 100644 --- a/tests/app/email_branding/test_rest.py +++ b/tests/app/email_branding/test_rest.py @@ -21,6 +21,23 @@ def test_get_email_branding_options(admin_request, notify_db, notify_db_session, assert email_branding[1]["organisation_id"] == "" +def test_get_email_branding_options_filter_org(admin_request, notify_db, notify_db_session, sample_organisation): + email_branding1 = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Org1", organisation_id=sample_organisation.id) + email_branding2 = EmailBranding(colour="#000000", logo="/path/other.png", name="Org2") + notify_db.session.add_all([email_branding1, email_branding2]) + notify_db.session.commit() + email_branding = admin_request.get("email_branding.get_email_branding_options", organisation_id=sample_organisation.id)[ + "email_branding" + ] + + assert len(email_branding) == 1 + assert email_branding[0]["organisation_id"] == str(sample_organisation.id) + + email_branding2 = admin_request.get("email_branding.get_email_branding_options")["email_branding"] + + assert len(email_branding2) == 2 + + def test_get_email_branding_by_id(admin_request, notify_db, notify_db_session): email_branding = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Some Org", text="My Org") notify_db.session.add(email_branding) From 4531e5eba8db6191221953ef78a0cd84b950f62f Mon Sep 17 00:00:00 2001 From: Ben Larabie Date: Thu, 21 Mar 2024 15:43:06 -0400 Subject: [PATCH 27/33] Adding error detection around the k8s rollout (#2145) * adding error detection around the k8s rollout * fixing for shellcheck --- .github/workflows/docker.yaml | 7 +------ scripts/callManifestsRollout.sh | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 6 deletions(-) create mode 100755 scripts/callManifestsRollout.sh diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml index 5fa4e2f343..5ff8c67f88 100644 --- a/.github/workflows/docker.yaml +++ b/.github/workflows/docker.yaml @@ -57,12 +57,7 @@ jobs: - 
name: Rollout in Kubernetes run: | - PAYLOAD={\"ref\":\"main\",\"inputs\":{\"docker_sha\":\"${GITHUB_SHA::7}\"}} - curl -L -X POST -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer $WORKFLOW_PAT" \ - -H "X-GitHub-Api-Version: 2022-11-28" \ - https://api.github.com/repos/cds-snc/notification-manifests/actions/workflows/api-rollout-k8s-staging.yaml/dispatches \ - -d $PAYLOAD + ./scripts/callManifestsRollout.sh ${GITHUB_SHA::7} - name: my-app-install token id: notify-pr-bot diff --git a/scripts/callManifestsRollout.sh b/scripts/callManifestsRollout.sh new file mode 100755 index 0000000000..29229ea093 --- /dev/null +++ b/scripts/callManifestsRollout.sh @@ -0,0 +1,17 @@ +#!/bin/bash +GITHUB_SHA=$1 +PAYLOAD="{\"ref\":\"main\",\"inputs\":{\"docker_sha\":\"$GITHUB_SHA\"}}" + + +RESPONSE=$(curl -w '%{http_code}\n' \ + -o /dev/null -s \ + -L -X POST -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $WORKFLOW_PAT" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/cds-snc/notification-manifests/actions/workflows/api-rollout-k8s-staging.yaml/dispatches \ + -d "$PAYLOAD") + +if [ "$RESPONSE" != 204 ]; then + echo "ERROR CALLING MANIFESTS ROLLOUT: HTTP RESPONSE: $RESPONSE" + exit 1 +fi From b51e4aa55f059186ffbdbf8ca9ea859c1939d12e Mon Sep 17 00:00:00 2001 From: "sre-read-write[bot]" <92993749+sre-read-write[bot]@users.noreply.github.com> Date: Mon, 25 Mar 2024 09:19:54 -0400 Subject: [PATCH 28/33] chore: synced file(s) with cds-snc/site-reliability-engineering (#2146) * chore: synced local '.github/workflows/s3-backup.yml' with remote 'tools/sre_file_sync/s3-backup.yml' * chore: synced local '.github/workflows/ossf-scorecard.yml' with remote 'tools/sre_file_sync/ossf-scorecard.yml' --------- Co-authored-by: sre-read-write[bot] <92993749+sre-read-write[bot]@users.noreply.github.com> --- .github/workflows/ossf-scorecard.yml | 2 +- .github/workflows/s3-backup.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml index f612283a7c..9102cfe9ee 100644 --- a/.github/workflows/ossf-scorecard.yml +++ b/.github/workflows/ossf-scorecard.yml @@ -25,7 +25,7 @@ jobs: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@0ae0fb3a2ca18a43d6dea9c07cfb9bd01d17eae1 + uses: ossf/scorecard-action@50aaf84fb1a9f22255cb8bfb1729f4dd085c838c with: results_file: ossf-results.json results_format: json diff --git a/.github/workflows/s3-backup.yml b/.github/workflows/s3-backup.yml index eb41d4c82e..5262e9bf97 100644 --- a/.github/workflows/s3-backup.yml +++ b/.github/workflows/s3-backup.yml @@ -15,7 +15,7 @@ jobs: fetch-depth: 0 # retrieve all history - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 + uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 with: aws-access-key-id: ${{ secrets.AWS_S3_BACKUP_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_S3_BACKUP_SECRET_ACCESS_KEY }} From 63fd7d063142bd9dd31c8f9644eae5b0e4f4de48 Mon Sep 17 00:00:00 2001 From: Jumana B Date: Mon, 25 Mar 2024 11:01:55 -0400 Subject: [PATCH 29/33] New service doesn't inherit org branding (#2147) --- app/dao/services_dao.py | 2 -- tests/app/service/test_rest.py | 5 ++--- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 1e36ace594..281bfa6cf9 100644 --- a/app/dao/services_dao.py 
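# For reference, a Python equivalent of scripts/callManifestsRollout.sh,
# assuming WORKFLOW_PAT is exported in the environment; GitHub's
# workflow-dispatch endpoint answers 204 with no body on success:
import os
import requests

def dispatch_rollout(docker_sha):
    resp = requests.post(
        "https://api.github.com/repos/cds-snc/notification-manifests/actions"
        "/workflows/api-rollout-k8s-staging.yaml/dispatches",
        headers={
            "Accept": "application/vnd.github+json",
            "Authorization": f"Bearer {os.environ['WORKFLOW_PAT']}",
            "X-GitHub-Api-Version": "2022-11-28",
        },
        json={"ref": "main", "inputs": {"docker_sha": docker_sha}},
        timeout=30,
    )
    if resp.status_code != 204:
        raise RuntimeError(f"ERROR CALLING MANIFESTS ROLLOUT: HTTP RESPONSE: {resp.status_code}")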
+++ b/app/dao/services_dao.py @@ -313,8 +313,6 @@ def dao_create_service( if organisation: service.organisation_id = organisation.id service.organisation_type = organisation.organisation_type - if organisation.email_branding: - service.email_branding = organisation.email_branding if organisation.letter_branding and not service.letter_branding: service.letter_branding = organisation.letter_branding diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index d7eae2d564..342943b433 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -458,7 +458,7 @@ def test_create_service_with_domain_sets_organisation(admin_request, sample_user assert json_resp["data"]["organisation"] is None -def test_create_service_inherits_branding_from_organisation(admin_request, sample_user, mocker): +def test_create_service_doesnt_inherit_branding_from_organisation(admin_request, sample_user, mocker): org = create_organisation() email_branding = create_email_branding() org.email_branding = email_branding @@ -482,8 +482,7 @@ def test_create_service_inherits_branding_from_organisation(admin_request, sampl _expected_status=201, ) - assert json_resp["data"]["email_branding"] == str(email_branding.id) - assert json_resp["data"]["letter_branding"] == str(letter_branding.id) + assert json_resp["data"]["email_branding"] is None def test_should_not_create_service_with_missing_user_id_field(notify_api, fake_uuid): From 8779e349d3e3d2e2377053548cdee38e8dfbdfb1 Mon Sep 17 00:00:00 2001 From: Jumana B Date: Mon, 25 Mar 2024 11:41:12 -0400 Subject: [PATCH 30/33] Force clients to send full API keys (#2099) * Revert "Revert "Add API Validation" (#2088)" This reverts commit 2de81548561984bbe1f8d2be78d2fc2a7541664e. * Add new error for full api req * fix * fix --------- Co-authored-by: William B <7444334+whabanks@users.noreply.github.com> --- app/api_key/rest.py | 3 +-- app/authentication/auth.py | 13 ++------- app/dao/api_key_dao.py | 25 ++++++++++++++--- tests/app/api_key/test_rest.py | 5 +++- .../app/authentication/test_authentication.py | 16 ++++++----- tests/app/dao/test_api_key_dao.py | 27 ++++++++++++++----- .../notifications/test_post_notifications.py | 12 ++++++--- 7 files changed, 68 insertions(+), 33 deletions(-) diff --git a/app/api_key/rest.py b/app/api_key/rest.py index 8ad96ec1e8..1aa6b28fd9 100644 --- a/app/api_key/rest.py +++ b/app/api_key/rest.py @@ -121,8 +121,7 @@ def revoke_api_keys(): # Step 1 try: - # take last 36 chars of string so that it works even if the full key is provided. - api_key_token = api_key_data["token"][-36:] + api_key_token = api_key_data["token"] api_key = get_api_key_by_secret(api_key_token) except Exception: current_app.logger.error( diff --git a/app/authentication/auth.py b/app/authentication/auth.py index 144c89079f..260a3fc78e 100644 --- a/app/authentication/auth.py +++ b/app/authentication/auth.py @@ -152,21 +152,12 @@ def requires_auth(): def _auth_by_api_key(auth_token): - # TODO: uncomment this when the grace period for the token prefix is over - # orig_token = auth_token - try: - # take last 36 chars of string so that it works even if the full key is provided. 
- auth_token = auth_token[-36:] api_key = get_api_key_by_secret(auth_token) - - # TODO: uncomment this when the grace period for the token prefix is over - # check for token prefix - # if current_app.config["API_KEY_PREFIX"] not in orig_token: - # raise AuthError("Invalid token: you must re-generate your API key to continue using GC Notify", 403, service_id=api_key.service.id, api_key_id=api_key.id) - except NoResultFound: raise AuthError("Invalid token: API key not found", 403) + except ValueError: + raise AuthError("Invalid token: Enter your full API key", 403) _auth_with_api_key(api_key, api_key.service) diff --git a/app/dao/api_key_dao.py b/app/dao/api_key_dao.py index f6a707bf76..1c9348d018 100644 --- a/app/dao/api_key_dao.py +++ b/app/dao/api_key_dao.py @@ -83,13 +83,30 @@ def update_compromised_api_key_info(service_id, api_key_id, compromised_info): db.session.add(api_key) -def get_api_key_by_secret(secret): - signed_with_all_keys = signer_api_key.sign_with_all_keys(str(secret)) +def get_api_key_by_secret(secret, service_id=None): + # Check the first part of the secret is the gc prefix + if current_app.config["API_KEY_PREFIX"] != secret[: len(current_app.config["API_KEY_PREFIX"])]: + raise ValueError() + + # Check if the remaining part of the secret is a the valid api key + token = secret[-36:] + signed_with_all_keys = signer_api_key.sign_with_all_keys(str(token)) for signed_secret in signed_with_all_keys: try: - return db.on_reader().query(ApiKey).filter_by(_secret=signed_secret).options(joinedload("service")).one() + api_key = db.on_reader().query(ApiKey).filter_by(_secret=signed_secret).options(joinedload("service")).one() except NoResultFound: - pass + raise NoResultFound() + + # Check the middle portion of the secret is the valid service id + if api_key and api_key.service_id: + if len(secret) >= 79: + service_id_from_token = str(secret[-73:-37]) + if str(api_key.service_id) != service_id_from_token: + raise ValueError() + else: + raise ValueError() + if api_key: + return api_key raise NoResultFound() diff --git a/tests/app/api_key/test_rest.py b/tests/app/api_key/test_rest.py index a28985884a..d236f8cd8b 100644 --- a/tests/app/api_key/test_rest.py +++ b/tests/app/api_key/test_rest.py @@ -81,6 +81,9 @@ def test_revoke_api_keys_with_valid_auth_revokes_and_notifies_user(self, client, api_key_1 = create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name="Key 1") unsigned_secret = get_unsigned_secret(api_key_1.id) + # Create token expected from the frontend + unsigned_secret = f"gcntfy-keyname-{service.id}-{unsigned_secret}" + sre_auth_header = create_sre_authorization_header() response = client.post( url_for("sre_tools.revoke_api_keys"), @@ -89,7 +92,7 @@ def test_revoke_api_keys_with_valid_auth_revokes_and_notifies_user(self, client, ) # Get api key from DB - api_key_1 = get_api_key_by_secret(api_key_1.secret) + api_key_1 = get_api_key_by_secret(unsigned_secret) assert response.status_code == 201 assert api_key_1.expiry_date is not None assert api_key_1.compromised_key_info["type"] == "cds-tester" diff --git a/tests/app/authentication/test_authentication.py b/tests/app/authentication/test_authentication.py index 16299507b9..4937360871 100644 --- a/tests/app/authentication/test_authentication.py +++ b/tests/app/authentication/test_authentication.py @@ -135,18 +135,20 @@ def test_admin_auth_should_not_allow_api_key_scheme(client, sample_api_key): @pytest.mark.parametrize("scheme", ["ApiKey-v1", "apikey-v1", "APIKEY-V1"]) def test_should_allow_auth_with_api_key_scheme(client, 
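# The slicing in get_api_key_by_secret above assumes this token layout:
#   gcntfy-<key name>-<service id, 36 chars>-<secret, 36 chars>
# A worked check of the offsets it relies on, using dummy 36-char fields:
token = "gcntfy-keyname-" + "s" * 36 + "-" + "k" * 36
assert token[-36:] == "k" * 36     # the signed-secret portion
assert token[-73:-37] == "s" * 36  # the service-id portion
assert token[-37] == "-"           # separator between the two
assert len(token) >= 79            # passes the minimum-length gate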
sample_api_key, scheme): api_key_secret = get_unsigned_secret(sample_api_key.id) - - response = client.get("/notifications", headers={"Authorization": f"{scheme} {api_key_secret}"}) + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{api_key_secret}" + response = client.get("/notifications", headers={"Authorization": f"{scheme} {unsigned_secret}"}) assert response.status_code == 200 -def test_should_allow_auth_with_api_key_scheme_36_chars_or_longer(client, sample_api_key): +def test_should_NOT_allow_auth_with_api_key_scheme_with_incorrect_format(client, sample_api_key): api_key_secret = "fhsdkjhfdsfhsd" + get_unsigned_secret(sample_api_key.id) response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1 {api_key_secret}"}) - assert response.status_code == 200 + assert response.status_code == 403 + error_message = json.loads(response.get_data()) + assert error_message["message"] == {"token": ["Invalid token: Enter your full API key"]} def test_should_not_allow_invalid_api_key(client, sample_api_key): @@ -154,7 +156,7 @@ def test_should_not_allow_invalid_api_key(client, sample_api_key): assert response.status_code == 403 error_message = json.loads(response.get_data()) - assert error_message["message"] == {"token": ["Invalid token: API key not found"]} + assert error_message["message"] == {"token": ["Invalid token: Enter your full API key"]} def test_should_not_allow_expired_api_key(client, sample_api_key): @@ -162,7 +164,9 @@ def test_should_not_allow_expired_api_key(client, sample_api_key): expire_api_key(service_id=sample_api_key.service_id, api_key_id=sample_api_key.id) - response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1 {api_key_secret}"}) + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{api_key_secret}" + + response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1 {unsigned_secret}"}) assert response.status_code == 403 error_message = json.loads(response.get_data()) diff --git a/tests/app/dao/test_api_key_dao.py b/tests/app/dao/test_api_key_dao.py index 5e23002171..eabb4cbdbf 100644 --- a/tests/app/dao/test_api_key_dao.py +++ b/tests/app/dao/test_api_key_dao.py @@ -116,12 +116,27 @@ def test_get_unsigned_secret_returns_key(sample_api_key): assert unsigned_api_key == sample_api_key.secret -def test_get_api_key_by_secret(sample_api_key): - unsigned_secret = get_unsigned_secret(sample_api_key.id) - assert get_api_key_by_secret(unsigned_secret).id == sample_api_key.id - - with pytest.raises(NoResultFound): - get_api_key_by_secret("nope") +class TestGetAPIKeyBySecret: + def test_get_api_key_by_secret(self, sample_api_key): + secret = get_unsigned_secret(sample_api_key.id) + # Create token expected from the frontend + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{secret}" + assert get_api_key_by_secret(unsigned_secret).id == sample_api_key.id + + with pytest.raises(ValueError): + get_api_key_by_secret("nope") + + # Test getting secret without the keyname prefix + with pytest.raises(ValueError): + get_api_key_by_secret(str(sample_api_key.id)) + + # Test the service_name isnt part of the secret + with pytest.raises(ValueError): + get_api_key_by_secret(f"gcntfy-keyname-hello-{secret}") + + # Test the secret is incorrect + with pytest.raises(NoResultFound): + get_api_key_by_secret(f"gcntfy-keyname-hello-{sample_api_key.service_id}-1234") def test_should_not_allow_duplicate_key_names_per_service(sample_api_key, fake_uuid): diff --git 
diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py
index 84b868168d..2e3d1d36b2 100644
--- a/tests/app/v2/notifications/test_post_notifications.py
+++ b/tests/app/v2/notifications/test_post_notifications.py
@@ -1515,6 +1515,8 @@ def __send_sms():
                 key_type=key_type,
             )
             save_model_api_key(api_key)
+            api_key_secret = get_unsigned_secret(api_key.id)
+            unsigned_secret = f"gcntfy-keyname-{api_key.service_id}-{api_key_secret}"

         with set_config_values(notify_api, {"REDIS_ENABLED": True}):
             response = client.post(
@@ -1522,7 +1524,7 @@ def __send_sms():
                 data=json.dumps(data),
                 headers=[
                     ("Content-Type", "application/json"),
-                    ("Authorization", f"ApiKey-v1 {get_unsigned_secret(api_key.id)}"),
+                    ("Authorization", f"ApiKey-v1 {unsigned_secret}"),
                 ],
             )
             return response
@@ -1563,6 +1565,8 @@ def __send_sms():
                 key_type=key_type,
             )
             save_model_api_key(api_key)
+            api_key_secret = get_unsigned_secret(api_key.id)
+            unsigned_secret = f"gcntfy-keyname-{api_key.service_id}-{api_key_secret}"

         with set_config_values(notify_api, {"REDIS_ENABLED": True}):
             response = client.post(
@@ -1570,7 +1574,7 @@ def __send_sms():
                 data=json.dumps(data),
                 headers=[
                     ("Content-Type", "application/json"),
-                    ("Authorization", f"ApiKey-v1 {get_unsigned_secret(api_key.id)}"),
+                    ("Authorization", f"ApiKey-v1 {unsigned_secret}"),
                 ],
             )
             return response
@@ -1607,6 +1611,8 @@ def __send_sms():
                 key_type=key_type,
             )
             save_model_api_key(api_key)
+            api_key_secret = get_unsigned_secret(api_key.id)
+            unsigned_secret = f"gcntfy-keyname-{api_key.service_id}-{api_key_secret}"

         with set_config_values(notify_api, {"REDIS_ENABLED": True}):
             response = client.post(
@@ -1614,7 +1620,7 @@ def __send_sms():
                 data=json.dumps(data),
                 headers=[
                     ("Content-Type", "application/json"),
-                    ("Authorization", f"ApiKey-v1 {get_unsigned_secret(api_key.id)}"),
+                    ("Authorization", f"ApiKey-v1 {unsigned_secret}"),
                 ],
             )
             return response

From 6767588ee8245ee53d0badf0441e2f0bda146d91 Mon Sep 17 00:00:00 2001
From: Steve Astels
Date: Tue, 26 Mar 2024 14:11:53 -0400
Subject: [PATCH 31/33] handle extra spaces in api key auth line (#2150)

---
 app/authentication/auth.py                      | 2 +-
 tests/app/authentication/test_authentication.py | 8 ++++++++
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/app/authentication/auth.py b/app/authentication/auth.py
index 260a3fc78e..5fe609a060 100644
--- a/app/authentication/auth.py
+++ b/app/authentication/auth.py
@@ -63,7 +63,7 @@ def get_auth_token(req):
     for el in AUTH_TYPES:
         scheme, auth_type, _ = el
         if auth_header.lower().startswith(scheme.lower()):
-            token = auth_header[len(scheme) + 1 :]
+            token = auth_header[len(scheme) + 1 :].strip()
             return auth_type, token

     raise AuthError(
diff --git a/tests/app/authentication/test_authentication.py b/tests/app/authentication/test_authentication.py
index 4937360871..40615fc6a6 100644
--- a/tests/app/authentication/test_authentication.py
+++ b/tests/app/authentication/test_authentication.py
@@ -141,6 +141,14 @@ def test_should_allow_auth_with_api_key_scheme(client, sample_api_key, scheme):
     assert response.status_code == 200


+def test_should_allow_auth_with_api_key_scheme_and_extra_spaces(client, sample_api_key):
+    api_key_secret = get_unsigned_secret(sample_api_key.id)
+    unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{api_key_secret}"
+    response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1  {unsigned_secret}"})
+
+    assert response.status_code == 200
+
+
 def test_should_NOT_allow_auth_with_api_key_scheme_with_incorrect_format(client, sample_api_key):
     api_key_secret = "fhsdkjhfdsfhsd" + get_unsigned_secret(sample_api_key.id)
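The one-character change above matters because the extracted token is used verbatim for the signed-secret lookup: an extra space after the scheme previously left a leading space on the token, so the key never matched. A standalone sketch of the parsing, with a simplified stand-in for the real AUTH_TYPES table:

    # Simplified stand-in for the scheme table in app/authentication/auth.py.
    AUTH_TYPES = [("ApiKey-v1", "api_key")]

    def get_token(auth_header: str) -> str:
        for scheme, _auth_type in AUTH_TYPES:
            if auth_header.lower().startswith(scheme.lower()):
                # .strip() drops any surrounding whitespace left after the scheme
                return auth_header[len(scheme) + 1 :].strip()
        raise ValueError("unsupported auth scheme")

    # A doubled space after the scheme no longer corrupts the token:
    assert get_token("ApiKey-v1   gcntfy-keyname-abc") == "gcntfy-keyname-abc"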
     api_key_secret = "fhsdkjhfdsfhsd" + get_unsigned_secret(sample_api_key.id)

From 51377adc4843f97ee3ac7f959163707055ca57fc Mon Sep 17 00:00:00 2001
From: William B <7444334+whabanks@users.noreply.github.com>
Date: Tue, 26 Mar 2024 15:29:37 -0400
Subject: [PATCH 32/33] Add organisation info to the freshdesk branding request (#2149)

* Add organisation info to the freshdesk branding request

* Formatting

* fix test

* formatting

* Fix tests, correct a typo

* Fix tests..again :)
---
 app/clients/freshdesk.py            | 4 ++++
 app/user/contact_request.py         | 1 +
 app/user/rest.py                    | 2 ++
 tests/app/clients/test_freshdesk.py | 6 ++++++
 tests/app/user/test_rest.py         | 5 ++++-
 5 files changed, 17 insertions(+), 1 deletion(-)

diff --git a/app/clients/freshdesk.py b/app/clients/freshdesk.py
index 8194cb29c4..66c97546fa 100644
--- a/app/clients/freshdesk.py
+++ b/app/clients/freshdesk.py
@@ -59,11 +59,15 @@ def _generate_description(self):
                 f"A new logo has been uploaded by {self.contact.name} ({self.contact.email_address}) for the following service:",
                 f"- Service id: {self.contact.service_id}",
                 f"- Service name: {self.contact.service_name}",
+                f"- Organisation id: {self.contact.organisation_id}",
+                f"- Organisation name: {self.contact.department_org_name}",
                 f"- Logo filename: {self.contact.branding_url}",
                 "<hr>",
                 f"Un nouveau logo a été téléchargé par {self.contact.name} ({self.contact.email_address}) pour le service suivant :",
                 f"- Identifiant du service : {self.contact.service_id}",
                 f"- Nom du service : {self.contact.service_name}",
+                f"- Identifiant de l'organisation: {self.contact.organisation_id}",
+                f"- Nom de l'organisation: {self.contact.department_org_name}",
                 f"- Nom du fichier du logo : {self.contact.branding_url}",
             ]
         )
", f"Un nouveau logo a été téléchargé par {self.contact.name} ({self.contact.email_address}) pour le service suivant :", f"- Identifiant du service : {self.contact.service_id}", f"- Nom du service : {self.contact.service_name}", + f"- Identifiant de l'organisation: {self.contact.organisation_id}", + f"- Nom de l'organisation: {self.contact.department_org_name}", f"- Nom du fichier du logo : {self.contact.branding_url}", ] ) diff --git a/app/user/contact_request.py b/app/user/contact_request.py index 7317a69302..edb02ad956 100644 --- a/app/user/contact_request.py +++ b/app/user/contact_request.py @@ -16,6 +16,7 @@ class ContactRequest: name: str = field(default="") message: str = field(default="") user_profile: str = field(default="") + organisation_id: str = field(default="") department_org_name: str = field(default="") program_service_name: str = field(default="") intended_recipients: str = field(default="") diff --git a/app/user/rest.py b/app/user/rest.py index d6f41c8fd5..f52893f252 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -497,6 +497,8 @@ def send_branding_request(user_id): email_address=user.email_address, service_id=data["serviceID"], service_name=data["service_name"], + organisation_id=data["organisation_id"], + department_org_name=data["organisation_name"], branding_url=get_logo_url(data["filename"]), ) contact.tags = ["z_skip_opsgenie", "z_skip_urgent_escalation"] diff --git a/tests/app/clients/test_freshdesk.py b/tests/app/clients/test_freshdesk.py index f730764f29..a3e0713d5f 100644 --- a/tests/app/clients/test_freshdesk.py +++ b/tests/app/clients/test_freshdesk.py @@ -127,11 +127,15 @@ def match_json(request): "description": "A new logo has been uploaded by name (test@email.com) for the following service:
" "- Service id: 8624bd36-b70b-4d4b-a459-13e1f4770b92
" "- Service name: t6
" + "- Organisation id: 6b72e84f-8591-42e1-93b8-7d24a45e1d79
" + "- Organisation name: best org name ever
" "- Logo filename: branding_url
" "

" "Un nouveau logo a été téléchargé par name (test@email.com) pour le service suivant :
" "- Identifiant du service : 8624bd36-b70b-4d4b-a459-13e1f4770b92
" "- Nom du service : t6
" + "- Identifiant de l'organisation: 6b72e84f-8591-42e1-93b8-7d24a45e1d79
" + "- Nom de l'organisation: best org name ever
" "- Nom du fichier du logo : branding_url", "email": "test@email.com", "priority": 1, @@ -158,6 +162,8 @@ def match_json(request): "friendly_support_type": "Branding request", "support_type": "branding_request", "service_name": "t6", + "organisation_id": "6b72e84f-8591-42e1-93b8-7d24a45e1d79", + "department_org_name": "best org name ever", "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92", "branding_url": "branding_url", } diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index 37c2806144..8a480848bd 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -954,12 +954,15 @@ def test_send_contact_request_go_live_with_org_notes(organisation_notes, departm assert mock_contact_request.department_org_name == department_org_name -def test_send_branding_request(client, sample_service, mocker): +def test_send_branding_request(client, sample_service, sample_organisation, mocker): sample_user = sample_service.users[0] + sample_service.organisation = sample_organisation post_data = { "service_name": sample_service.name, "email_address": sample_user.email_address, "serviceID": str(sample_service.id), + "organisation_id": str(sample_service.organisation.id), + "organisation_name": sample_service.organisation.name, "filename": "branding_url", } mocked_freshdesk = mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201) From 7c19d9fc36c79693445540d12640be1742ad3af7 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Tue, 2 Apr 2024 14:46:16 -0400 Subject: [PATCH 33/33] remove unused queues (#2148) --- app/celery/provider_tasks.py | 8 ++++---- app/config.py | 12 ------------ scripts/run_celery.ps1 | 2 +- scripts/run_celery.sh | 2 +- scripts/run_celery_local.sh | 2 +- scripts/run_celery_no_sms_sending.sh | 4 ++-- scripts/run_celery_send_email.sh | 3 +-- scripts/run_celery_send_sms.sh | 3 +-- tests/app/test_config.py | 4 +--- 9 files changed, 12 insertions(+), 28 deletions(-) diff --git a/app/celery/provider_tasks.py b/app/celery/provider_tasks.py index 0539bd6ce1..4f21f9f2d9 100644 --- a/app/celery/provider_tasks.py +++ b/app/celery/provider_tasks.py @@ -42,10 +42,10 @@ def deliver_throttled_sms(self, notification_id): # Celery rate limits are per worker instance and not a global rate limit. # https://docs.celeryproject.org/en/stable/userguide/tasks.html#Task.rate_limit -# This task is dispatched through the `send-sms-tasks` queue. -# This queue is consumed by 6 Celery instances with 4 workers in production. -# The maximum throughput is therefore 6 instances * 4 workers = 24 tasks per second -# if we set rate_limit="1/s" on the Celery task +# We currently set rate_limit="1/s" on the Celery task and 4 workers per pod, and so a limit of 4 tasks per second per pod. +# The number of pods is controlled by the Kubernetes HPA and scales up and down with demand. +# Currently in production we have 3 celery-sms-send-primary pods, and up to 20 celery-sms-send-scalable pods +# This means we can send up to 92 messages per second. @notify_celery.task( bind=True, name="deliver_sms", diff --git a/app/config.py b/app/config.py index d08a88b854..b8f8f8a3ec 100644 --- a/app/config.py +++ b/app/config.py @@ -78,18 +78,11 @@ class QueueNames(object): # A queue for the tasks associated with the batch saving NOTIFY_CACHE = "notifiy-cache-tasks" - # For normal send of notifications. This is relatively normal volume and flushed - # pretty quickly. 
diff --git a/app/config.py b/app/config.py
index d08a88b854..b8f8f8a3ec 100644
--- a/app/config.py
+++ b/app/config.py
@@ -78,18 +78,11 @@ class QueueNames(object):
     # A queue for the tasks associated with the batch saving
     NOTIFY_CACHE = "notifiy-cache-tasks"

-    # For normal send of notifications. This is relatively normal volume and flushed
-    # pretty quickly.
-    SEND_NORMAL_QUEUE = "send-{}-tasks"  # notification type to be filled in the queue name
-
     # Queues for sending all SMS, except long dedicated numbers.
     SEND_SMS_HIGH = "send-sms-high"
     SEND_SMS_MEDIUM = "send-sms-medium"
     SEND_SMS_LOW = "send-sms-low"

-    # TODO: Delete this queue once we verify that it is not used anymore.
-    SEND_SMS = "send-sms-tasks"
-
     # Primarily used for long dedicated numbers sent from us-west-2 upon which
     # we have a limit to send per second and hence, needs to be throttled.
     SEND_THROTTLED_SMS = "send-throttled-sms-tasks"
@@ -99,9 +92,6 @@ class QueueNames(object):
     SEND_EMAIL_MEDIUM = "send-email-medium"
     SEND_EMAIL_LOW = "send-email-low"

-    # TODO: Delete this queue once we verify that it is not used anymore.
-    SEND_EMAIL = "send-email-tasks"
-
     # The research mode queue for notifications that are tested by users trying
     # out Notify.
     RESEARCH_MODE = "research-mode-tasks"
@@ -158,12 +148,10 @@ def all_queues():
             QueueNames.SEND_SMS_HIGH,
             QueueNames.SEND_SMS_MEDIUM,
             QueueNames.SEND_SMS_LOW,
-            QueueNames.SEND_SMS,
             QueueNames.SEND_THROTTLED_SMS,
             QueueNames.SEND_EMAIL_HIGH,
             QueueNames.SEND_EMAIL_MEDIUM,
             QueueNames.SEND_EMAIL_LOW,
-            QueueNames.SEND_EMAIL,
             QueueNames.RESEARCH_MODE,
             QueueNames.REPORTING,
             QueueNames.JOBS,
diff --git a/scripts/run_celery.ps1 b/scripts/run_celery.ps1
index 724b47766e..16203f1d16 100644
--- a/scripts/run_celery.ps1
+++ b/scripts/run_celery.ps1
@@ -1,3 +1,3 @@
 $ENV:FORKED_BY_MULTIPROCESSING=1
-celery --app run_celery worker --pidfile="$env:TEMP\celery.pid" --pool=solo --loglevel=DEBUG --concurrency=1 -Q "database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low,send-email-tasks,service-callbacks,delivery-receipts"
+celery --app run_celery worker --pidfile="$env:TEMP\celery.pid" --pool=solo --loglevel=DEBUG --concurrency=1 -Q "database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-high,send-sms-medium,send-sms-low,service-callbacks,delivery-receipts"
diff --git a/scripts/run_celery.sh b/scripts/run_celery.sh
index 99f09ac9bb..2cf10f82fd 100755
--- a/scripts/run_celery.sh
+++ b/scripts/run_celery.sh
@@ -6,4 +6,4 @@ set -e

 echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}"

-celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low,send-email-tasks,service-callbacks,delivery-receipts
+celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-high,send-sms-medium,send-sms-low,service-callbacks,delivery-receipts
diff --git a/scripts/run_celery_local.sh b/scripts/run_celery_local.sh
index d9f439b8f9..58a914e2ab 100755
--- a/scripts/run_celery_local.sh
+++ b/scripts/run_celery_local.sh
@@ -7,4 +7,4 @@ set -e

 echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}"

-celery -A run_celery.notify_celery worker --beat --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low,send-throttled-sms-tasks,send-email-high,send-email-medium,send-email-low,send-email-tasks,service-callbacks,delivery-receipts
+celery -A run_celery.notify_celery worker --beat --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-high,send-sms-medium,send-sms-low,send-throttled-sms-tasks,send-email-high,send-email-medium,send-email-low,service-callbacks,delivery-receipts
diff --git a/scripts/run_celery_no_sms_sending.sh b/scripts/run_celery_no_sms_sending.sh
index cebefa7435..6c86bc2758 100755
--- a/scripts/run_celery_no_sms_sending.sh
+++ b/scripts/run_celery_no_sms_sending.sh
@@ -3,7 +3,7 @@ set -e

 # Runs celery with all celery queues except send-throttled-sms-tasks,
-# send-sms-tasks, send-sms-high, send-sms-medium, or send-sms-low.
+# send-sms-high, send-sms-medium, or send-sms-low.

 # Check and see if this is running in K8s and if so, wait for cloudwatch agent
 if [ -n "${STATSD_HOST}" ]; then
@@ -28,4 +28,4 @@ fi

 echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}"

-celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-email-tasks,send-email-high,send-email-medium,send-email-low,service-callbacks,delivery-receipts
+celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-email-high,send-email-medium,send-email-low,service-callbacks,delivery-receipts
diff --git a/scripts/run_celery_send_email.sh b/scripts/run_celery_send_email.sh
index 98fda14a68..29c6039f09 100755
--- a/scripts/run_celery_send_email.sh
+++ b/scripts/run_celery_send_email.sh
@@ -6,5 +6,4 @@ set -e

 echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}"

-# TODO: we shouldn't be using the send-email-tasks queue anymore, once we verify this we can remove it
-celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-email-tasks,send-email-high,send-email-medium,send-email-low
+celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-email-high,send-email-medium,send-email-low
diff --git a/scripts/run_celery_send_sms.sh b/scripts/run_celery_send_sms.sh
index 5f7865b62c..7aee759338 100755
--- a/scripts/run_celery_send_sms.sh
+++ b/scripts/run_celery_send_sms.sh
@@ -6,5 +6,4 @@ set -e

 echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}"

-# TODO: we shouldn't be using the send-sms-tasks queue anymore - once we verify this we can remove it
-celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low
+celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-sms-high,send-sms-medium,send-sms-low
diff --git a/tests/app/test_config.py b/tests/app/test_config.py
index 5b25f4b093..a722e1ea42 100644
--- a/tests/app/test_config.py
+++ b/tests/app/test_config.py
@@ -27,7 +27,7 @@ def reload_config():
 def test_queue_names_all_queues_correct():
     # Need to ensure that all_queues() only returns queue names used in API
     queues = QueueNames.all_queues()
-    assert len(queues) == 23
+    assert len(queues) == 21
     assert set(
         [
             QueueNames.PRIORITY,
@@ -37,12 +37,10 @@ def test_queue_names_all_queues_correct():
             QueueNames.PRIORITY_DATABASE,
             QueueNames.NORMAL_DATABASE,
             QueueNames.BULK_DATABASE,
-            QueueNames.SEND_SMS,
             QueueNames.SEND_SMS_HIGH,
             QueueNames.SEND_SMS_MEDIUM,
             QueueNames.SEND_SMS_LOW,
             QueueNames.SEND_THROTTLED_SMS,
-            QueueNames.SEND_EMAIL,
             QueueNames.SEND_EMAIL_HIGH,
             QueueNames.SEND_EMAIL_MEDIUM,
             QueueNames.SEND_EMAIL_LOW,