
Commit

Revert "Clear Redis notification counts during nightly fact job (#2344)" (#2354)

This reverts commit ad21ce1.
whabanks authored Nov 14, 2024
1 parent e197b99 commit ffa8dbd
Showing 15 changed files with 177 additions and 458 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test.yaml
@@ -69,7 +69,7 @@ jobs:
       run: |
         cp -f .env.example .env
     - name: Checks for new endpoints against AWS WAF rules
-      uses: cds-snc/notification-utils/.github/actions/[email protected].7
+      uses: cds-snc/notification-utils/.github/actions/[email protected].6
       with:
         app-loc: '/github/workspace'
         app-libs: '/github/workspace/env/site-packages'
40 changes: 5 additions & 35 deletions app/celery/process_pinpoint_receipts_tasks.py
@@ -3,15 +3,11 @@

 from flask import current_app, json
 from notifications_utils.statsd_decorators import statsd
-from notifications_utils.timezones import convert_utc_to_local_timezone
 from sqlalchemy.orm.exc import NoResultFound

 from app import annual_limit_client, notify_celery, statsd_client
 from app.config import QueueNames
 from app.dao import notifications_dao
-from app.dao.fact_notification_status_dao import (
-    fetch_notification_status_for_service_for_day,
-)
 from app.models import (
     NOTIFICATION_DELIVERED,
     NOTIFICATION_PERMANENT_FAILURE,
@@ -21,7 +17,6 @@
     PINPOINT_PROVIDER,
 )
 from app.notifications.callbacks import _check_and_queue_callback_task
-from app.utils import prepare_notification_counts_for_seeding
 from celery.exceptions import Retry

 # Pinpoint receipts are of the form:
@@ -111,22 +106,6 @@ def process_pinpoint_results(self, response):
             sms_origination_phone_number=origination_phone_number,
         )

-        service_id = notification.service_id
-        # Flags if seeding has occurred. Since we seed after updating the notification status in the DB then the current notification
-        # is included in the fetch_notification_status_for_service_for_day call below, thus we don't need to increment the count.
-        notifications_to_seed = None
-
-        if current_app.config["FF_ANNUAL_LIMIT"]:
-            if not annual_limit_client.was_seeded_today(service_id):
-                annual_limit_client.set_seeded_at(service_id)
-                notifications_to_seed = fetch_notification_status_for_service_for_day(
-                    convert_utc_to_local_timezone(datetime.utcnow()),
-                    service_id=service_id,
-                )
-                annual_limit_client.seed_annual_limit_notifications(
-                    service_id, prepare_notification_counts_for_seeding(notifications_to_seed)
-                )
-
         if notification_status != NOTIFICATION_DELIVERED:
             current_app.logger.info(
                 (
@@ -136,34 +115,25 @@
                 )
             )
             # TODO FF_ANNUAL_LIMIT removal
             if current_app.config["FF_ANNUAL_LIMIT"]:
-                # Only increment if we didn't just seed.
-                if notifications_to_seed is None:
-                    annual_limit_client.increment_sms_failed(service_id)
+                annual_limit_client.increment_sms_failed(notification.service_id)
                 current_app.logger.info(
-                    f"Incremented sms_delivered count in Redis. Service: {service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(service_id)}"
+                    f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
                 )
         else:
             current_app.logger.info(
                 f"Pinpoint callback return status of {notification_status} for notification: {notification.id}"
             )

             # TODO FF_ANNUAL_LIMIT removal
             if current_app.config["FF_ANNUAL_LIMIT"]:
-                # Only increment if we didn't just seed.
-                if notifications_to_seed is None:
-                    annual_limit_client.increment_sms_delivered(service_id)
+                annual_limit_client.increment_sms_delivered(notification.service_id)
                 current_app.logger.info(
-                    f"Incremented sms_delivered count in Redis. Service: {service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(service_id)}"
+                    f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
                 )

         statsd_client.incr(f"callback.pinpoint.{notification_status}")

         if notification.sent_at:
-            statsd_client.timing_with_dates(
-                "callback.pinpoint.elapsed-time",
-                datetime.utcnow(),
-                notification.sent_at,
-            )
+            statsd_client.timing_with_dates("callback.pinpoint.elapsed-time", datetime.utcnow(), notification.sent_at)

         _check_and_queue_callback_task(notification)
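Note: the block removed from this task (and the matching blocks removed from the SES and SNS tasks below) implemented a seed-once-per-day, then-increment pattern for the annual-limit counters in Redis. The sketch below is illustrative only: it reuses the client, DAO, and utility names visible in this diff as they existed before the revert, the wrapper function is hypothetical, and it would only run inside the notification-api app context.

```
# Hedged sketch of the reverted seed-or-increment flow for a single SMS callback.
from datetime import datetime

from notifications_utils.timezones import convert_utc_to_local_timezone

from app import annual_limit_client
from app.dao.fact_notification_status_dao import fetch_notification_status_for_service_for_day
from app.utils import prepare_notification_counts_for_seeding  # deleted by this commit


def record_sms_result(notification, delivered: bool) -> None:
    service_id = notification.service_id
    if not annual_limit_client.was_seeded_today(service_id):
        # First callback of the day for this service: copy today's per-status totals
        # from the database into Redis. The notification being processed has already
        # been written to the DB, so it is part of that snapshot and must not be
        # incremented a second time below.
        annual_limit_client.set_seeded_at(service_id)
        counts = fetch_notification_status_for_service_for_day(
            convert_utc_to_local_timezone(datetime.utcnow()), service_id=service_id
        )
        annual_limit_client.seed_annual_limit_notifications(
            service_id, prepare_notification_counts_for_seeding(counts)
        )
        return  # seeding already accounted for this notification

    if delivered:
        annual_limit_client.increment_sms_delivered(service_id)
    else:
        annual_limit_client.increment_sms_failed(service_id)
```

After the revert, the callback tasks go back to unconditionally incrementing the Redis counters whenever FF_ANNUAL_LIMIT is enabled.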
29 changes: 2 additions & 27 deletions app/celery/process_ses_receipts_tasks.py
@@ -2,23 +2,18 @@

 from flask import current_app, json
 from notifications_utils.statsd_decorators import statsd
-from notifications_utils.timezones import convert_utc_to_local_timezone
 from sqlalchemy.orm.exc import NoResultFound

 from app import annual_limit_client, bounce_rate_client, notify_celery, statsd_client
 from app.config import QueueNames
 from app.dao import notifications_dao
-from app.dao.fact_notification_status_dao import (
-    fetch_notification_status_for_service_for_day,
-)
 from app.models import NOTIFICATION_DELIVERED, NOTIFICATION_PERMANENT_FAILURE
 from app.notifications.callbacks import _check_and_queue_callback_task
 from app.notifications.notifications_ses_callback import (
     _check_and_queue_complaint_callback_task,
     get_aws_responses,
     handle_complaint,
 )
-from app.utils import prepare_notification_counts_for_seeding
 from celery.exceptions import Retry

@@ -88,32 +83,14 @@ def process_ses_results(self, response): # noqa: C901
             bounce_response=aws_response_dict.get("bounce_response", None),
         )

-        service_id = notification.service_id
-        # Flags if seeding has occurred. Since we seed after updating the notification status in the DB then the current notification
-        # is included in the fetch_notification_status_for_service_for_day call below, thus we don't need to increment the count.
-        notifications_to_seed = None
-        # Check if we have already seeded the annual limit counts for today
-        if current_app.config["FF_ANNUAL_LIMIT"]:
-            if not annual_limit_client.was_seeded_today(service_id):
-                annual_limit_client.set_seeded_at(service_id)
-                notifications_to_seed = fetch_notification_status_for_service_for_day(
-                    convert_utc_to_local_timezone(datetime.utcnow()),
-                    service_id=service_id,
-                )
-                annual_limit_client.seed_annual_limit_notifications(
-                    service_id, prepare_notification_counts_for_seeding(notifications_to_seed)
-                )
-
         if not aws_response_dict["success"]:
             current_app.logger.info(
                 "SES delivery failed: notification id {} and reference {} has error found. Status {}".format(
                     notification.id, reference, aws_response_dict["message"]
                 )
             )
             if current_app.config["FF_ANNUAL_LIMIT"]:
-                # Only increment if we didn't just seed.
-                if notifications_to_seed is None:
-                    annual_limit_client.increment_email_failed(notification.service_id)
+                annual_limit_client.increment_email_failed(notification.service_id)
                 current_app.logger.info(
                     f"Incremented email_failed count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
                 )
@@ -122,9 +99,7 @@
                 "SES callback return status of {} for notification: {}".format(notification_status, notification.id)
             )
             if current_app.config["FF_ANNUAL_LIMIT"]:
-                # Only increment if we didn't just seed.
-                if notifications_to_seed is None:
-                    annual_limit_client.increment_email_delivered(notification.service_id)
+                annual_limit_client.increment_email_delivered(notification.service_id)
                 current_app.logger.info(
                     f"Incremented email_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
                 )
28 changes: 2 additions & 26 deletions app/celery/process_sns_receipts_tasks.py
@@ -2,15 +2,11 @@

 from flask import current_app, json
 from notifications_utils.statsd_decorators import statsd
-from notifications_utils.timezones import convert_utc_to_local_timezone
 from sqlalchemy.orm.exc import NoResultFound

 from app import annual_limit_client, notify_celery, statsd_client
 from app.config import QueueNames
 from app.dao import notifications_dao
-from app.dao.fact_notification_status_dao import (
-    fetch_notification_status_for_service_for_day,
-)
 from app.models import (
     NOTIFICATION_DELIVERED,
     NOTIFICATION_PERMANENT_FAILURE,
@@ -20,7 +16,6 @@
     SNS_PROVIDER,
 )
 from app.notifications.callbacks import _check_and_queue_callback_task
-from app.utils import prepare_notification_counts_for_seeding
 from celery.exceptions import Retry

@@ -69,21 +64,6 @@ def process_sns_results(self, response):
             provider_response=provider_response,
         )

-        service_id = notification.service_id
-        # Flags if seeding has occurred. Since we seed after updating the notification status in the DB then the current notification
-        # is included in the fetch_notification_status_for_service_for_day call below, thus we don't need to increment the count.
-        notifications_to_seed = None
-        if current_app.config["FF_ANNUAL_LIMIT"]:
-            if not annual_limit_client.was_seeded_today(service_id):
-                annual_limit_client.set_seeded_at(service_id)
-                notifications_to_seed = fetch_notification_status_for_service_for_day(
-                    convert_utc_to_local_timezone(datetime.utcnow()),
-                    service_id=service_id,
-                )
-                annual_limit_client.seed_annual_limit_notifications(
-                    service_id, prepare_notification_counts_for_seeding(notifications_to_seed)
-                )
-
         if notification_status != NOTIFICATION_DELIVERED:
             current_app.logger.info(
                 (
@@ -93,19 +73,15 @@
                 )
             )
             # TODO FF_ANNUAL_LIMIT removal
             if current_app.config["FF_ANNUAL_LIMIT"]:
-                # Only increment if we didn't just seed.
-                if notifications_to_seed is None:
-                    annual_limit_client.increment_sms_failed(notification.service_id)
+                annual_limit_client.increment_sms_failed(notification.service_id)
                 current_app.logger.info(
                     f"Incremented sms_failed count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
                 )
         else:
             current_app.logger.info(f"SNS callback return status of {notification_status} for notification: {notification.id}")
             # TODO FF_ANNUAL_LIMIT removal
             if current_app.config["FF_ANNUAL_LIMIT"]:
-                # Only increment if we didn't just seed.
-                if notifications_to_seed is None:
-                    annual_limit_client.increment_sms_delivered(notification.service_id)
+                annual_limit_client.increment_sms_delivered(notification.service_id)
                 current_app.logger.info(
                     f"Incremented sms_delivered count in Redis. Service: {notification.service_id} Notification: {notification.id} Current counts: {annual_limit_client.get_all_notification_counts(notification.service_id)}"
                 )
6 changes: 1 addition & 5 deletions app/celery/reporting_tasks.py
@@ -5,7 +5,7 @@
 from notifications_utils.statsd_decorators import statsd
 from notifications_utils.timezones import convert_utc_to_local_timezone

-from app import annual_limit_client, notify_celery
+from app import notify_celery
 from app.config import QueueNames
 from app.cronitor import cronitor
 from app.dao.annual_limits_data_dao import get_previous_quarter, insert_quarter_data
@@ -113,10 +113,6 @@ def create_nightly_notification_status_for_day(process_day):
                     len(transit_data), process_day, chunk
                 )
             )
-            # TODO: FF_ANNUAL_LIMIT removal
-            if current_app.config["FF_ANNUAL_LIMIT"]:
-                annual_limit_client.reset_all_notification_counts(chunk)
-
         except Exception as e:
             current_app.logger.error(
                 "create-nightly-notification-status-for-day task failed for day: {}, for service_ids: {}. Error: {}".format(
20 changes: 7 additions & 13 deletions app/dao/fact_notification_status_dao.py
@@ -130,26 +130,20 @@ def update_fact_notification_status(data, process_day, service_ids=None):


 def fetch_notification_status_for_service_by_month(start_date, end_date, service_id):
-    filters = [
-        FactNotificationStatus.service_id == service_id,
-        FactNotificationStatus.bst_date >= start_date.strftime("%Y-%m-%d"),
-        # This works only for timezones to the west of GMT
-        FactNotificationStatus.bst_date < end_date.strftime("%Y-%m-%d"),
-        FactNotificationStatus.key_type != KEY_TYPE_TEST,
-    ]
-
-    # TODO FF_ANNUAL_LIMIT removal
-    if current_app.config["FF_ANNUAL_LIMIT"]:
-        filters.append(FactNotificationStatus.bst_date != datetime.utcnow().date().strftime("%Y-%m-%d"))
-
     return (
         db.session.query(
             func.date_trunc("month", FactNotificationStatus.bst_date).label("month"),
             FactNotificationStatus.notification_type,
             FactNotificationStatus.notification_status,
             func.sum(FactNotificationStatus.notification_count).label("count"),
         )
-        .filter(*filters)
+        .filter(
+            FactNotificationStatus.service_id == service_id,
+            FactNotificationStatus.bst_date >= start_date.strftime("%Y-%m-%d"),
+            # This works only for timezones to the west of GMT
+            FactNotificationStatus.bst_date < end_date.strftime("%Y-%m-%d"),
+            FactNotificationStatus.key_type != KEY_TYPE_TEST,
+        )
         .group_by(
             func.date_trunc("month", FactNotificationStatus.bst_date).label("month"),
             FactNotificationStatus.notification_type,
8 changes: 2 additions & 6 deletions app/service/rest.py
@@ -404,10 +404,7 @@ def create_api_key(service_id=None):
     unsigned_api_key = get_unsigned_secret(valid_api_key.id)

     # prefix the API key so they keys can be easily identified for security scanning
-    keydata = {
-        "key": unsigned_api_key,
-        "key_name": current_app.config["API_KEY_PREFIX"] + valid_api_key.name,
-    }
+    keydata = {"key": unsigned_api_key, "key_name": current_app.config["API_KEY_PREFIX"] + valid_api_key.name}

     return jsonify(data=keydata), 201

@@ -634,8 +631,7 @@ def get_monthly_notification_stats(service_id):
         statistics.add_monthly_notification_status_stats(data, stats)

     now = datetime.utcnow()
-    # TODO FF_ANNUAL_LIMIT removal
-    if not current_app.config["FF_ANNUAL_LIMIT"] and end_date > now:
+    if end_date > now:
         todays_deltas = fetch_notification_status_for_service_for_day(convert_utc_to_local_timezone(now), service_id=service_id)
         statistics.add_monthly_notification_status_stats(data, todays_deltas)

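Taken together, the fact-table query change above and this endpoint change restore the pre-#2344 behaviour: finished days come from ft_notification_status (filled by the nightly job), and the current day is layered on from the live notifications table instead of being excluded behind FF_ANNUAL_LIMIT. A simplified, hedged sketch of that restored flow — helper names are taken from the diff, while the wrapper function and the shape of the surrounding view code are assumptions:

```
# Hedged sketch: assembling a month window in get_monthly_notification_stats after the revert.
def monthly_stats_with_today(service_id, start_date, end_date, data):
    # Historical days: aggregated rows from the fact table, grouped by month.
    stats = fetch_notification_status_for_service_by_month(start_date, end_date, service_id)
    statistics.add_monthly_notification_status_stats(data, stats)

    # Current day: not in the fact table yet, so read it live and merge it in.
    now = datetime.utcnow()
    if end_date > now:
        todays_deltas = fetch_notification_status_for_service_for_day(
            convert_utc_to_local_timezone(now), service_id=service_id
        )
        statistics.add_monthly_notification_status_stats(data, todays_deltas)
```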
35 changes: 0 additions & 35 deletions app/utils.py
@@ -15,16 +15,6 @@

 local_timezone = pytz.timezone(os.getenv("TIMEZONE", "America/Toronto"))

-DELIVERED_STATUSES = ["delivered", "sent", "returned-letter"]
-FAILURE_STATUSES = [
-    "failed",
-    "temporary-failure",
-    "permanent-failure",
-    "technical-failure",
-    "virus-scan-failed",
-    "validation-failed",
-]
-

 def pagination_links(pagination, endpoint, **kwargs):
     if "page" in kwargs:
@@ -231,28 +221,3 @@ def get_limit_reset_time_et() -> dict[str, str]:

     limit_reset_time_et = {"12hr": next_midnight_utc_in_et.strftime("%-I%p"), "24hr": next_midnight_utc_in_et.strftime("%H")}
     return limit_reset_time_et
-
-
-def prepare_notification_counts_for_seeding(notification_counts: list) -> dict:
-    """Utility method that transforms a list of notification counts into a dictionary, mapping notification counts by type and success/failure.
-    Used to seed notification counts in Redis for annual limits.
-    e.g.
-    ```
-    [(datetime, 'email', 'sent', 1),
-     (datetime, 'sms', 'sent', 2)]
-    ```
-    Becomes:
-    ```
-    {'email_sent': 1, 'sms_sent': 2}
-    ```
-    Args:
-        notification_counts (list): A list of tuples containing (date, notification_type, status, count)
-    Returns:
-        dict: That acts as a mapping to build the notification counts in Redis
-    """
-    return {
-        f"{notification_type}_{'delivered' if status in DELIVERED_STATUSES else 'failed'}": count
-        for _, notification_type, status, count in notification_counts
-        if status in DELIVERED_STATUSES or status in FAILURE_STATUSES
-    }
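Since prepare_notification_counts_for_seeding is deleted outright, here is a self-contained sketch of what it did, runnable outside the app. The function and status lists are copied from the removed code; the sample rows are invented for illustration. Note that, per the status lists, a "sent" status lands in the *_delivered bucket, so the removed docstring's 'email_sent' example key was slightly off.

```
from datetime import datetime

# Status buckets copied from the removed module-level constants in app/utils.py.
DELIVERED_STATUSES = ["delivered", "sent", "returned-letter"]
FAILURE_STATUSES = [
    "failed",
    "temporary-failure",
    "permanent-failure",
    "technical-failure",
    "virus-scan-failed",
    "validation-failed",
]


def prepare_notification_counts_for_seeding(notification_counts: list) -> dict:
    # Collapse (date, type, status, count) rows into "<type>_delivered" / "<type>_failed"
    # keys; rows with in-flight statuses (e.g. "created") are dropped.
    return {
        f"{notification_type}_{'delivered' if status in DELIVERED_STATUSES else 'failed'}": count
        for _, notification_type, status, count in notification_counts
        if status in DELIVERED_STATUSES or status in FAILURE_STATUSES
    }


# Invented sample data for illustration:
rows = [
    (datetime(2024, 11, 14), "email", "sent", 1),
    (datetime(2024, 11, 14), "sms", "temporary-failure", 2),
    (datetime(2024, 11, 14), "sms", "created", 3),  # ignored: neither delivered nor failed
]
print(prepare_notification_counts_for_seeding(rows))  # {'email_delivered': 1, 'sms_failed': 2}
```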
8 changes: 4 additions & 4 deletions poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -65,7 +65,7 @@ Werkzeug = "3.0.4"
 MarkupSafe = "2.1.5"
 # REVIEW: v2 is using sha512 instead of sha1 by default (in v1)
 itsdangerous = "2.2.0"
-notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.7" }
+notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.3.6" }

 # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8
 typing-extensions = "4.12.2"
