Reinstate (#2344) - Add integration tests (#2366)
* bump utils for annual limit client fixes

* update lock file

* Added integration-style test

- Adjusted existing tests to account for bug fixes that will land with utils 52.4.0

* refresh lock

* refresh lock again

* Fix botched merge and formatting

* Refresh lock

* Reinstate code from (#2344)

* Clear Redis notification counts during nightly fact job

* Add seeding logic to receipt tasks

* Squash duplicate counting bug

- Fix tests
- Monthly stats no longer returns stats for the current day
- Misc. cleanup of comments

* Add feature flag checks

* Fix test
whabanks authored Nov 21, 2024
1 parent 7da719b commit f417d8c
Showing 16 changed files with 619 additions and 178 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test.yaml
@@ -69,7 +69,7 @@ jobs:
run: |
cp -f .env.example .env
- name: Checks for new endpoints against AWS WAF rules
uses: cds-snc/notification-utils/.github/actions/waffles@52.3.6
uses: cds-snc/notification-utils/.github/actions/waffles@52.4.0
with:
app-loc: '/github/workspace'
app-libs: '/github/workspace/env/site-packages'
40 changes: 35 additions & 5 deletions app/celery/process_pinpoint_receipts_tasks.py
@@ -3,11 +3,15 @@

from flask import current_app, json
from notifications_utils.statsd_decorators import statsd
from notifications_utils.timezones import convert_utc_to_local_timezone
from sqlalchemy.orm.exc import NoResultFound

from app import annual_limit_client, notify_celery, statsd_client
from app.config import QueueNames
from app.dao import notifications_dao
from app.dao.fact_notification_status_dao import (
fetch_notification_status_for_service_for_day,
)
from app.models import (
NOTIFICATION_DELIVERED,
NOTIFICATION_PERMANENT_FAILURE,
@@ -17,6 +21,7 @@
PINPOINT_PROVIDER,
)
from app.notifications.callbacks import _check_and_queue_callback_task
from app.utils import prepare_notification_counts_for_seeding
from celery.exceptions import Retry

# Pinpoint receipts are of the form:
@@ -106,6 +111,22 @@ def process_pinpoint_results(self, response):
sms_origination_phone_number=origination_phone_number,
)

service_id = notification.service_id
# Flags whether seeding has occurred. Because we seed after updating the notification status in the DB, the current notification
# is already included in the fetch_notification_status_for_service_for_day call below, so we don't need to increment the count.
notifications_to_seed = None

if current_app.config["FF_ANNUAL_LIMIT"]:
if not annual_limit_client.was_seeded_today(service_id):
annual_limit_client.set_seeded_at(service_id)
notifications_to_seed = fetch_notification_status_for_service_for_day(
convert_utc_to_local_timezone(datetime.utcnow()),
service_id=service_id,
)
annual_limit_client.seed_annual_limit_notifications(
service_id, prepare_notification_counts_for_seeding(notifications_to_seed)
)

if notification_status != NOTIFICATION_DELIVERED:
current_app.logger.info(
(
@@ -115,25 +136,34 @@
)
# TODO FF_ANNUAL_LIMIT removal
if current_app.config["FF_ANNUAL_LIMIT"]:
annual_limit_client.increment_sms_failed(notification.service_id)
# Only increment if we didn't just seed.
if notifications_to_seed is None:
annual_limit_client.increment_sms_failed(service_id)
current_app.logger.info(
f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
f"Incremented sms_delivered count in Redis. Service: {service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(service_id)}"
)
else:
current_app.logger.info(
f"Pinpoint callback return status of {notification_status} for notification: {notification.id}"
)

# TODO FF_ANNUAL_LIMIT removal
if current_app.config["FF_ANNUAL_LIMIT"]:
annual_limit_client.increment_sms_delivered(notification.service_id)
# Only increment if we didn't just seed.
if notifications_to_seed is None:
annual_limit_client.increment_sms_delivered(service_id)
current_app.logger.info(
f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
f"Incremented sms_delivered count in Redis. Service: {service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(service_id)}"
)

statsd_client.incr(f"callback.pinpoint.{notification_status}")

if notification.sent_at:
statsd_client.timing_with_dates("callback.pinpoint.elapsed-time", datetime.utcnow(), notification.sent_at)
statsd_client.timing_with_dates(
"callback.pinpoint.elapsed-time",
datetime.utcnow(),
notification.sent_at,
)

_check_and_queue_callback_task(notification)

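The Pinpoint, SES, and SNS receipt tasks all follow the same seed-or-increment pattern shown above. A minimal sketch of that flow, with the Redis client stubbed and the DB totals passed in directly (the fake client and helper below are illustrations, not code from this commit):

```python
# Minimal sketch of the seed-or-increment pattern used by the receipt tasks.
# Only the control flow mirrors the diff; FakeAnnualLimitClient stands in for
# annual_limit_client.


class FakeAnnualLimitClient:
    """Stand-in for the Redis-backed annual limit client (illustration only)."""

    def __init__(self):
        self.seeded = set()
        self.counts = {}

    def was_seeded_today(self, service_id):
        return service_id in self.seeded

    def set_seeded_at(self, service_id):
        self.seeded.add(service_id)

    def seed_annual_limit_notifications(self, service_id, counts_by_type):
        self.counts[service_id] = dict(counts_by_type)

    def increment_sms_delivered(self, service_id):
        counts = self.counts.setdefault(service_id, {})
        counts["sms_delivered"] = counts.get("sms_delivered", 0) + 1


def record_sms_delivery(client, service_id, todays_counts_from_db):
    """Seed once per day, then increment on every later receipt."""
    notifications_to_seed = None
    if not client.was_seeded_today(service_id):
        client.set_seeded_at(service_id)
        # The status row for the current notification is already in the DB,
        # so the seeded totals include it and no increment is needed.
        notifications_to_seed = todays_counts_from_db
        client.seed_annual_limit_notifications(service_id, notifications_to_seed)
    if notifications_to_seed is None:
        client.increment_sms_delivered(service_id)
```

Seeding only after the notification status has been written is what lets the seeding call skip the increment without losing a count.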
29 changes: 27 additions & 2 deletions app/celery/process_ses_receipts_tasks.py
@@ -2,18 +2,23 @@

from flask import current_app, json
from notifications_utils.statsd_decorators import statsd
from notifications_utils.timezones import convert_utc_to_local_timezone
from sqlalchemy.orm.exc import NoResultFound

from app import annual_limit_client, bounce_rate_client, notify_celery, statsd_client
from app.config import QueueNames
from app.dao import notifications_dao
from app.dao.fact_notification_status_dao import (
fetch_notification_status_for_service_for_day,
)
from app.models import NOTIFICATION_DELIVERED, NOTIFICATION_PERMANENT_FAILURE
from app.notifications.callbacks import _check_and_queue_callback_task
from app.notifications.notifications_ses_callback import (
_check_and_queue_complaint_callback_task,
get_aws_responses,
handle_complaint,
)
from app.utils import prepare_notification_counts_for_seeding
from celery.exceptions import Retry


@@ -83,14 +88,32 @@ def process_ses_results(self, response): # noqa: C901
bounce_response=aws_response_dict.get("bounce_response", None),
)

service_id = notification.service_id
# Flags whether seeding has occurred. Because we seed after updating the notification status in the DB, the current notification
# is already included in the fetch_notification_status_for_service_for_day call below, so we don't need to increment the count.
notifications_to_seed = None
# Check if we have already seeded the annual limit counts for today
if current_app.config["FF_ANNUAL_LIMIT"]:
if not annual_limit_client.was_seeded_today(service_id):
annual_limit_client.set_seeded_at(service_id)
notifications_to_seed = fetch_notification_status_for_service_for_day(
convert_utc_to_local_timezone(datetime.utcnow()),
service_id=service_id,
)
annual_limit_client.seed_annual_limit_notifications(
service_id, prepare_notification_counts_for_seeding(notifications_to_seed)
)

if not aws_response_dict["success"]:
current_app.logger.info(
"SES delivery failed: notification id {} and reference {} has error found. Status {}".format(
notification.id, reference, aws_response_dict["message"]
)
)
if current_app.config["FF_ANNUAL_LIMIT"]:
annual_limit_client.increment_email_failed(notification.service_id)
# Only increment if we didn't just seed.
if notifications_to_seed is None:
annual_limit_client.increment_email_failed(notification.service_id)
current_app.logger.info(
f"Incremented email_failed count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
)
@@ -99,7 +122,9 @@
"SES callback return status of {} for notification: {}".format(notification_status, notification.id)
)
if current_app.config["FF_ANNUAL_LIMIT"]:
annual_limit_client.increment_email_delivered(notification.service_id)
# Only increment if we didn't just seed.
if notifications_to_seed is None:
annual_limit_client.increment_email_delivered(notification.service_id)
current_app.logger.info(
f"Incremented email_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
)
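A hypothetical check of the no-double-counting behaviour, reusing the fake client and helper from the sketch above (this is not one of the integration tests added in this commit):

```python
def test_second_receipt_increments_instead_of_reseeding():
    client = FakeAnnualLimitClient()
    service_id = "service-1"

    # First receipt of the day: seed from the DB totals, which already include
    # this notification, and skip the increment.
    record_sms_delivery(client, service_id, {"sms_delivered": 3})
    assert client.counts[service_id] == {"sms_delivered": 3}

    # Later receipt the same day: already seeded, so the DB totals are not
    # consulted and the counter is simply incremented.
    record_sms_delivery(client, service_id, {"sms_delivered": 4})
    assert client.counts[service_id] == {"sms_delivered": 4}
```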
28 changes: 26 additions & 2 deletions app/celery/process_sns_receipts_tasks.py
@@ -2,11 +2,15 @@

from flask import current_app, json
from notifications_utils.statsd_decorators import statsd
from notifications_utils.timezones import convert_utc_to_local_timezone
from sqlalchemy.orm.exc import NoResultFound

from app import annual_limit_client, notify_celery, statsd_client
from app.config import QueueNames
from app.dao import notifications_dao
from app.dao.fact_notification_status_dao import (
fetch_notification_status_for_service_for_day,
)
from app.models import (
NOTIFICATION_DELIVERED,
NOTIFICATION_PERMANENT_FAILURE,
@@ -16,6 +20,7 @@
SNS_PROVIDER,
)
from app.notifications.callbacks import _check_and_queue_callback_task
from app.utils import prepare_notification_counts_for_seeding
from celery.exceptions import Retry


@@ -64,6 +69,21 @@ def process_sns_results(self, response):
provider_response=provider_response,
)

service_id = notification.service_id
# Flags whether seeding has occurred. Because we seed after updating the notification status in the DB, the current notification
# is already included in the fetch_notification_status_for_service_for_day call below, so we don't need to increment the count.
notifications_to_seed = None
if current_app.config["FF_ANNUAL_LIMIT"]:
if not annual_limit_client.was_seeded_today(service_id):
annual_limit_client.set_seeded_at(service_id)
notifications_to_seed = fetch_notification_status_for_service_for_day(
convert_utc_to_local_timezone(datetime.utcnow()),
service_id=service_id,
)
annual_limit_client.seed_annual_limit_notifications(
service_id, prepare_notification_counts_for_seeding(notifications_to_seed)
)

if notification_status != NOTIFICATION_DELIVERED:
current_app.logger.info(
(
@@ -73,15 +93,19 @@
)
# TODO FF_ANNUAL_LIMIT removal
if current_app.config["FF_ANNUAL_LIMIT"]:
annual_limit_client.increment_sms_failed(notification.service_id)
# Only increment if we didn't just seed.
if notifications_to_seed is None:
annual_limit_client.increment_sms_failed(notification.service_id)
current_app.logger.info(
f"Incremented sms_failed count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
)
else:
current_app.logger.info(f"SNS callback return status of {notification_status} for notification: {notification.id}")
# TODO FF_ANNUAL_LIMIT removal
if current_app.config["FF_ANNUAL_LIMIT"]:
annual_limit_client.increment_sms_delivered(notification.service_id)
# Only increment if we didn't just seed.
if notifications_to_seed is None:
annual_limit_client.increment_sms_delivered(notification.service_id)
current_app.logger.info(
f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
)
6 changes: 5 additions & 1 deletion app/celery/reporting_tasks.py
@@ -5,7 +5,7 @@
from notifications_utils.statsd_decorators import statsd
from notifications_utils.timezones import convert_utc_to_local_timezone

from app import notify_celery
from app import annual_limit_client, notify_celery
from app.config import QueueNames
from app.cronitor import cronitor
from app.dao.annual_limits_data_dao import (
@@ -120,6 +120,10 @@ def create_nightly_notification_status_for_day(process_day):
len(transit_data), process_day, chunk
)
)
# TODO: FF_ANNUAL_LIMIT removal
if current_app.config["FF_ANNUAL_LIMIT"]:
annual_limit_client.reset_all_notification_counts(chunk)

except Exception as e:
current_app.logger.error(
"create-nightly-notification-status-for-day task failed for day: {}, for service_ids: {}. Error: {}".format(
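The reset is issued per chunk of service IDs, and only after that chunk's fact rows have been written. A small sketch of the ordering (the two callables stand in for the real DAO insert and annual_limit_client.reset_all_notification_counts; the chunking helper is an assumption):

```python
# Sketch of the nightly ordering: persist a chunk's rows into the fact table
# first, then clear that chunk's Redis counters so the next day starts at zero.
def nightly_reset(service_ids, persist_fact_rows, reset_redis_counts, chunk_size=20):
    for start in range(0, len(service_ids), chunk_size):
        chunk = service_ids[start:start + chunk_size]
        persist_fact_rows(chunk)       # fact_notification_status now holds the day's totals
        reset_redis_counts(chunk)      # Redis counters are only a same-day cache
```

Resetting only after the fact rows are committed keeps the database, not Redis, as the source of truth.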
20 changes: 13 additions & 7 deletions app/dao/fact_notification_status_dao.py
@@ -130,20 +130,26 @@ def update_fact_notification_status(data, process_day, service_ids=None):


def fetch_notification_status_for_service_by_month(start_date, end_date, service_id):
filters = [
FactNotificationStatus.service_id == service_id,
FactNotificationStatus.bst_date >= start_date.strftime("%Y-%m-%d"),
# This works only for timezones to the west of GMT
FactNotificationStatus.bst_date < end_date.strftime("%Y-%m-%d"),
FactNotificationStatus.key_type != KEY_TYPE_TEST,
]

# TODO FF_ANNUAL_LIMIT removal
if current_app.config["FF_ANNUAL_LIMIT"]:
filters.append(FactNotificationStatus.bst_date != datetime.utcnow().date().strftime("%Y-%m-%d"))

return (
db.session.query(
func.date_trunc("month", FactNotificationStatus.bst_date).label("month"),
FactNotificationStatus.notification_type,
FactNotificationStatus.notification_status,
func.sum(FactNotificationStatus.notification_count).label("count"),
)
.filter(
FactNotificationStatus.service_id == service_id,
FactNotificationStatus.bst_date >= start_date.strftime("%Y-%m-%d"),
# This works only for timezones to the west of GMT
FactNotificationStatus.bst_date < end_date.strftime("%Y-%m-%d"),
FactNotificationStatus.key_type != KEY_TYPE_TEST,
)
.filter(*filters)
.group_by(
func.date_trunc("month", FactNotificationStatus.bst_date).label("month"),
FactNotificationStatus.notification_type,
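With the flag on, the current day's row is excluded because today's totals are held in Redis and would otherwise be counted twice. A plain-Python illustration of the same filter (the tuple layout and function name are assumptions for the example):

```python
def filter_monthly_rows(rows, start_date, end_date, today, annual_limit_enabled):
    """rows: iterable of (bst_date, notification_type, status, count) tuples."""
    for bst_date, notification_type, status, count in rows:
        if not (start_date <= bst_date < end_date):
            continue
        # Skip today's partial row when the annual-limit flag is on; those
        # counts are tracked in Redis until the nightly job persists them.
        if annual_limit_enabled and bst_date == today:
            continue
        yield bst_date, notification_type, status, count
```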
8 changes: 6 additions & 2 deletions app/service/rest.py
@@ -404,7 +404,10 @@ def create_api_key(service_id=None):
unsigned_api_key = get_unsigned_secret(valid_api_key.id)

# prefix the API key so they keys can be easily identified for security scanning
keydata = {"key": unsigned_api_key, "key_name": current_app.config["API_KEY_PREFIX"] + valid_api_key.name}
keydata = {
"key": unsigned_api_key,
"key_name": current_app.config["API_KEY_PREFIX"] + valid_api_key.name,
}

return jsonify(data=keydata), 201

@@ -631,7 +634,8 @@ def get_monthly_notification_stats(service_id):
statistics.add_monthly_notification_status_stats(data, stats)

now = datetime.utcnow()
if end_date > now:
# TODO FF_ANNUAL_LIMIT removal
if not current_app.config["FF_ANNUAL_LIMIT"] and end_date > now:
todays_deltas = fetch_notification_status_for_service_for_day(convert_utc_to_local_timezone(now), service_id=service_id)
statistics.add_monthly_notification_status_stats(data, todays_deltas)

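The same idea at the REST layer: today's live deltas are only folded into the monthly stats when the flag is off. A reduced sketch of the guard (function name and arguments are illustrative):

```python
def monthly_stats_with_optional_today(monthly_rows, todays_deltas, end_date, now, ff_annual_limit):
    # With FF_ANNUAL_LIMIT on, today's counts are tracked in Redis, so the
    # monthly endpoint no longer merges the current day's partial totals.
    data = list(monthly_rows)
    if not ff_annual_limit and end_date > now:
        data.extend(todays_deltas)
    return data
```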
35 changes: 35 additions & 0 deletions app/utils.py
@@ -15,6 +15,16 @@

local_timezone = pytz.timezone(os.getenv("TIMEZONE", "America/Toronto"))

DELIVERED_STATUSES = ["delivered", "sent", "returned-letter"]
FAILURE_STATUSES = [
"failed",
"temporary-failure",
"permanent-failure",
"technical-failure",
"virus-scan-failed",
"validation-failed",
]


def pagination_links(pagination, endpoint, **kwargs):
if "page" in kwargs:
@@ -221,3 +231,28 @@ def get_limit_reset_time_et() -> dict[str, str]:

limit_reset_time_et = {"12hr": next_midnight_utc_in_et.strftime("%-I%p"), "24hr": next_midnight_utc_in_et.strftime("%H")}
return limit_reset_time_et


def prepare_notification_counts_for_seeding(notification_counts: list) -> dict:
"""Utility method that transforms a list of notification counts into a dictionary, mapping notification counts by type and success/failure.
Used to seed notification counts in Redis for annual limits.
e.g.
```
[(datetime, 'email', 'sent', 1),
(datetime, 'sms', 'sent', 2)]
```
Becomes:
```
{'email_sent': 1, 'sms_sent': 2}
```
Args:
notification_counts (list): A list of tuples containing (date, notification_type, status, count)
Returns:
dict: A mapping used to build the notification counts in Redis
"""
return {
f"{notification_type}_{'delivered' if status in DELIVERED_STATUSES else 'failed'}": count
for _, notification_type, status, count in notification_counts
if status in DELIVERED_STATUSES or status in FAILURE_STATUSES
}
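
A short usage example of the helper above (values are illustrative; the tuples mirror the shape returned by fetch_notification_status_for_service_for_day as described in the docstring):

```python
from datetime import datetime

from app.utils import prepare_notification_counts_for_seeding

rows = [
    (datetime(2024, 11, 21), "email", "delivered", 5),
    (datetime(2024, 11, 21), "email", "permanent-failure", 2),
    (datetime(2024, 11, 21), "sms", "sent", 3),
    (datetime(2024, 11, 21), "sms", "created", 1),  # neither delivered nor failed: dropped
]

print(prepare_notification_counts_for_seeding(rows))
# {'email_delivered': 5, 'email_failed': 2, 'sms_delivered': 3}
```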
8 changes: 4 additions & 4 deletions poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -65,7 +65,7 @@ Werkzeug = "3.0.4"
MarkupSafe = "2.1.5"
# REVIEW: v2 is using sha512 instead of sha1 by default (in v1)
itsdangerous = "2.2.0"
notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.6" }
notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.4.0" }

# rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8
typing-extensions = "4.12.2"