Decoupled malware scan code + lowered retry period for high priority emails
jimleroyer committed Nov 20, 2023
1 parent 3bf74e5 commit 8d4a8f8
Showing 3 changed files with 22 additions and 11 deletions.
27 changes: 21 additions & 6 deletions app/celery/provider_tasks.py
@@ -81,17 +81,16 @@ def deliver_email(self, notification_id):
         _check_and_queue_callback_task(notification)
     except MalwareDetectedException:
         _check_and_queue_callback_task(notification)
-    except Exception as e:
-        if isinstance(e, MalwareScanInProgressException) and self.request.retries <= SCAN_MAX_BACKOFF_RETRIES:
-            countdown = SCAN_RETRY_BACKOFF * (self.request.retries + 1)  # do we need to add 1 here?
+    except MalwareScanInProgressException:
+        if self.request.retries <= SCAN_MAX_BACKOFF_RETRIES:
+            countdown = SCAN_RETRY_BACKOFF * (self.request.retries + 1)
         else:
             countdown = None
         try:
-            current_app.logger.warning(f"The exception is {repr(e)}")
             if self.request.retries <= 10:
-                current_app.logger.warning("RETRY {}: Email notification {} failed".format(self.request.retries, notification_id))
+                current_app.logger.warning("RETRY {}: Email notification {} is waiting on pending malware scanning".format(self.request.retries, notification_id))
             else:
-                current_app.logger.exception("RETRY: Email notification {} failed".format(notification_id))
+                current_app.logger.exception("RETRY: Email notification {} failed on pending malware scanning".format(notification_id))
             if countdown is not None:
                 self.retry(queue=QueueNames.RETRY, countdown=countdown)
             else:
@@ -105,6 +104,22 @@ def deliver_email(self, notification_id):
             update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE)
             _check_and_queue_callback_task(notification)
             raise NotificationTechnicalFailureException(message)
+    except Exception as e:
+        try:
+            if self.request.retries <= 10:
+                current_app.logger.warning("RETRY {}: Email notification {} failed".format(self.request.retries, notification_id))
+            else:
+                current_app.logger.exception("RETRY: Email notification {} failed".format(notification_id), exc_info=e)
+            self.retry(**build_retry_task_params(notification.notification_type, notification.template.process_type))
+        except self.MaxRetriesExceededError:
+            message = (
+                "RETRY FAILED: Max retries reached. "
+                "The task send_email_to_provider failed for notification {}. "
+                "Notification has been updated to technical-failure".format(notification_id)
+            )
+            update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE)
+            _check_and_queue_callback_task(notification)
+            raise NotificationTechnicalFailureException(message)
 
 
 def _deliver_sms(self, notification_id):
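For context on the new MalwareScanInProgressException branch above, here is a minimal, self-contained sketch of the linear backoff it applies while a scan is still pending. The constant values are assumptions for illustration only; the real SCAN_RETRY_BACKOFF and SCAN_MAX_BACKOFF_RETRIES are defined elsewhere in provider_tasks.py and are not part of this diff.

```python
from typing import Optional

# Assumed values for illustration; the real constants live in app/celery/provider_tasks.py.
SCAN_RETRY_BACKOFF = 10  # seconds between retries (assumed)
SCAN_MAX_BACKOFF_RETRIES = 5  # retries that still get a custom countdown (assumed)


def scan_retry_countdown(retries: int) -> Optional[int]:
    """Linear backoff used while a malware scan is still in progress.

    Returns the Celery retry countdown in seconds, or None once the
    backoff budget is exhausted (the task then retries without a
    custom countdown).
    """
    if retries <= SCAN_MAX_BACKOFF_RETRIES:
        return SCAN_RETRY_BACKOFF * (retries + 1)
    return None


# With the assumed values, retries 0..5 wait 10, 20, 30, 40, 50, 60 seconds,
# after which the countdown falls back to None.
print([scan_retry_countdown(r) for r in range(7)])
```

Note the backoff grows linearly with the retry count rather than exponentially, and the scan-in-progress path no longer shares its error handling with the generic Exception handler.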
5 changes: 1 addition & 4 deletions app/notifications/__init__.py
@@ -29,8 +29,5 @@ def build_retry_task_params(notification_type: str, notification_process_type: s
 
     # Overriding the retry policy is only supported for SMS for now;
     # email support coming later.
-    if notification_type == SMS_TYPE:
-        params["countdown"] = RETRY_PERIODS[notification_process_type]
-    else:
-        params["countdown"] = RETRY_DEFAULT
+    params["countdown"] = RETRY_PERIODS[notification_process_type]
     return params
1 change: 0 additions & 1 deletion app/notifications/callbacks.py
@@ -10,7 +10,6 @@ def _check_and_queue_callback_task(notification):
     service_callback_api = get_service_delivery_status_callback_api_for_service(service_id=notification.service_id)
     if service_callback_api:
         notification_data = create_delivery_status_callback_data(notification, service_callback_api)
-
         send_delivery_status_to_service.apply_async([str(notification.id), notification_data], queue=QueueNames.CALLBACKS)
 
 
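For completeness, a sketch of how the delivery-status callback call above could be exercised with Celery. The task body, broker URL, and queue name are assumptions; only the apply_async call shape (positional args as a list, routed to the callbacks queue) mirrors app/notifications/callbacks.py.

```python
from celery import Celery

app = Celery("notify-sketch", broker="redis://localhost:6379/0")  # broker URL assumed


@app.task
def send_delivery_status_to_service(notification_id: str, notification_data: dict) -> None:
    # In the real code base this posts the status payload to the service's
    # configured callback URL; here it only echoes the call.
    print(f"delivery status for {notification_id}: {notification_data}")


def queue_callback(notification_id: str, notification_data: dict) -> None:
    # Same invocation shape as _check_and_queue_callback_task:
    # positional args passed as a list, routed to the callbacks queue.
    send_delivery_status_to_service.apply_async(
        [notification_id, notification_data],
        queue="service-callbacks",  # QueueNames.CALLBACKS value assumed
    )
```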
