Skip to content

Commit

Permalink
Merge branch 'main' into smoke-loop
Browse files Browse the repository at this point in the history
  • Loading branch information
ben851 authored Dec 7, 2023
2 parents 59d81bc + e8f19bd commit 5c3b50f
Show file tree
Hide file tree
Showing 28 changed files with 1,574 additions and 1,420 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/codeql.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,15 +27,15 @@ jobs:
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1

- name: Initialize CodeQL
uses: github/codeql-action/init@66b90a5db151a8042fa97405c6cf843bbe433f7b # v2.22.7
uses: github/codeql-action/init@407ffafae6a767df3e0230c3df91b6443ae8df75 # v2.22.8
with:
languages: ${{ matrix.language }}
queries: +security-and-quality

- name: Autobuild
uses: github/codeql-action/autobuild@66b90a5db151a8042fa97405c6cf843bbe433f7b # v2.22.7
uses: github/codeql-action/autobuild@407ffafae6a767df3e0230c3df91b6443ae8df75 # v2.22.8

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@66b90a5db151a8042fa97405c6cf843bbe433f7b # v2.22.7
uses: github/codeql-action/analyze@407ffafae6a767df3e0230c3df91b6443ae8df75 # v2.22.8
with:
category: "/language:${{ matrix.language }}"
20 changes: 18 additions & 2 deletions .github/workflows/docker.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -73,10 +73,26 @@ jobs:
- name: Update images in staging
run: |
kubectl set image deployment.apps/api api=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery celery=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery-beat celery-beat=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery-sms celery-sms=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery-sms-send celery-sms-send=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery-primary celery-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery-scalable celery-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery-sms-send-primary celery-sms-send-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery-sms-send-scalable celery-sms-send-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery-email-send-primary celery-email-send-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
kubectl set image deployment.apps/celery-email-send-scalable celery-email-send-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
- name: Restart deployments in staging
run: |
kubectl rollout restart deployment/api -n notification-canada-ca
kubectl rollout restart deployment/celery-beat -n notification-canada-ca
kubectl rollout restart deployment/celery-sms -n notification-canada-ca
kubectl rollout restart deployment/celery-primary -n notification-canada-ca
kubectl rollout restart deployment/celery-scalable -n notification-canada-ca
kubectl rollout restart deployment/celery-sms-send-primary -n notification-canada-ca
kubectl rollout restart deployment/celery-sms-send-scalable -n notification-canada-ca
kubectl rollout restart deployment/celery-email-send-primary -n notification-canada-ca
kubectl rollout restart deployment/celery-email-send-scalable -n notification-canada-ca
- name: my-app-install token
id: notify-pr-bot
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/performance.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ jobs:
run: sudo apt-get update && sudo apt-get install libssl-dev libcurl4-openssl-dev
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up Python 3.10
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa # v4.8.0
with:
python-version: '3.10'
- name: Upgrade pip
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ jobs:
run: sudo apt-get update && sudo apt-get install libssl-dev libcurl4-openssl-dev
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up Python 3.10
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1
uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa # v4.8.0
with:
python-version: '3.10'
- name: Upgrade pip
Expand Down Expand Up @@ -67,7 +67,7 @@ jobs:
run: |
cp -f .env.example .env
- name: Checks for new endpoints against AWS WAF rules
uses: cds-snc/notification-utils/.github/actions/waffles@b50a26d4a2c1844369640aa9de399d0b02e896b5 # 52.0.12
uses: cds-snc/notification-utils/.github/actions/waffles@4a4ba9d51af71d493f873b977bc0561b121ae51e # 52.0.15
with:
app-loc: '/github/workspace'
app-libs: '/github/workspace/env/site-packages'
Expand Down
48 changes: 15 additions & 33 deletions app/api_key/rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@
get_last_send_for_api_key,
get_total_notifications_sent_for_api_key,
)
from app.dao.services_dao import dao_fetch_service_by_id
from app.errors import InvalidRequest, register_errors
from app.service.sender import send_notification_to_service_users

api_key_blueprint = Blueprint("api_key", __name__)
register_errors(api_key_blueprint)
Expand Down Expand Up @@ -72,38 +74,18 @@ def get_api_keys_ranked(n_days_back):
return jsonify(data=data)


def send_api_key_revokation_email(service_id, api_key_name, api_key_information):
"""
TODO: this function if not ready yet. It needs a template to be created.
email = email_data_request_schema.load(request.get_json())
users_to_send_to = dao_fetch_active_users_for_service(service_id)
template = dao_get_template_by_id(current_app.config["API_KEY_REVOKED_TEMPLATE_ID"]) # this template currently doesn't exist
service = Service.query.get(current_app.config["NOTIFY_SERVICE_ID"])
users_service = Service.query.get(service_id)
for user_to_send_to in users_to_send_to:
saved_notification = persist_notification(
template_id=template.id,
template_version=template.version,
recipient=email["email"],
service=service,
personalisation={
"user_name": user_to_send_to.name,
"api_key_name": api_key_name,
"service_name": users_service.name,
"api_key_information": api_key_information,
},
notification_type=template.template_type,
api_key_id=None,
key_type=KEY_TYPE_NORMAL,
reply_to_text=service.get_default_reply_to_email_address(),
)
send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY)
"""
return
def send_api_key_revocation_email(service_id, api_key_name, api_key_information):
    """Email every user of a service to tell them one of its API keys was revoked.

    Looks up the service and fans the APIKEY_REVOKE template out to all of the
    service's users via send_notification_to_service_users, personalised with
    the service name, the revoked key's name, and the public location where
    the key was found.

    Args:
        service_id: id of the service whose key was revoked.
        api_key_name: display name of the revoked key.
        api_key_information: mapping describing the leak; assumes it carries
            a "url" key with the public location — TODO confirm against caller.
    """
    owning_service = dao_fetch_service_by_id(service_id)
    personalisation = {
        "service_name": owning_service.name,
        "public_location": api_key_information["url"],
        "key_name": api_key_name,
    }
    send_notification_to_service_users(
        service_id=service_id,
        template_id=current_app.config["APIKEY_REVOKE_TEMPLATE_ID"],
        personalisation=personalisation,
        include_user_fields=["name"],
    )


@sre_tools_blueprint.route("/api-key-revoke", methods=["POST"])
Expand Down Expand Up @@ -168,6 +150,6 @@ def revoke_api_keys():
)

# Step 4
send_api_key_revokation_email(api_key.service_id, api_key.name, api_key_data)
send_api_key_revocation_email(api_key.service_id, api_key.name, api_key_data)

return jsonify(result="ok"), 201
10 changes: 9 additions & 1 deletion app/celery/process_ses_receipts_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,5 +96,13 @@ def process_ses_results(self, response):
raise

except Exception as e:
current_app.logger.exception("Error processing SES results: {}".format(type(e)))
notification_msg = "Notification ID: {}".format(notification.id) if notification else "No notification"
notification_status_msg = (
"Notification status: {}".format(notification_status) if notification_status else "No notification status"
)
ref_msg = "Reference ID: {}".format(reference) if reference else "No reference"

current_app.logger.exception(
"Error processing SES results: {} [{}, {}, {}]".format(type(e), notification_msg, notification_status_msg, ref_msg)
)
self.retry(queue=QueueNames.RETRY)
2 changes: 1 addition & 1 deletion app/celery/provider_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def deliver_sms(self, notification_id):
@statsd(namespace="tasks")
def deliver_email(self, notification_id):
try:
current_app.logger.info("Start sending email for notification id: {}".format(notification_id))
current_app.logger.debug("Start sending email for notification id: {}".format(notification_id))
notification = notifications_dao.get_notification_by_id(notification_id)
if not notification:
raise NoResultFound()
Expand Down
14 changes: 7 additions & 7 deletions app/celery/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -277,12 +277,12 @@ def save_smss(self, service_id: Optional[str], signed_notifications: List[Signed
try:
# If the data is not present in the encrypted data then fallback on whats needed for process_job.
saved_notifications = persist_notifications(verified_notifications)
current_app.logger.info(
current_app.logger.debug(
f"Saved following notifications into db: {notification_id_queue.keys()} associated with receipt {receipt}"
)
if receipt:
acknowledge_receipt(SMS_TYPE, process_type, receipt)
current_app.logger.info(
current_app.logger.debug(
f"Batch saving: receipt_id {receipt} removed from buffer queue for notification_id {notification_id} for process_type {process_type}"
)
else:
Expand All @@ -297,7 +297,7 @@ def save_smss(self, service_id: Optional[str], signed_notifications: List[Signed
signed_and_verified = list(zip(signed_notifications, verified_notifications))
handle_batch_error_and_forward(self, signed_and_verified, SMS_TYPE, e, receipt, template)

current_app.logger.info(f"Sending following sms notifications to AWS: {notification_id_queue.keys()}")
current_app.logger.debug(f"Sending following sms notifications to AWS: {notification_id_queue.keys()}")
for notification_obj in saved_notifications:
try:
if not current_app.config["FF_EMAIL_DAILY_LIMIT"]:
Expand Down Expand Up @@ -383,7 +383,7 @@ def save_emails(self, _service_id: Optional[str], signed_notifications: List[Sig
try:
# If the data is not present in the encrypted data then fallback on whats needed for process_job
saved_notifications = persist_notifications(verified_notifications)
current_app.logger.info(
current_app.logger.debug(
f"Saved following notifications into db: {notification_id_queue.keys()} associated with receipt {receipt}"
)
if receipt:
Expand All @@ -392,7 +392,7 @@ def save_emails(self, _service_id: Optional[str], signed_notifications: List[Sig
# at this point in the code we have a list of notifications (saved_notifications)
# which could use multiple templates
acknowledge_receipt(EMAIL_TYPE, process_type, receipt)
current_app.logger.info(
current_app.logger.debug(
f"Batch saving: receipt_id {receipt} removed from buffer queue for notification_id {notification_id} for process_type {process_type}"
)
else:
Expand All @@ -415,7 +415,7 @@ def try_to_send_notifications_to_queue(notification_id_queue, service, saved_not
Loop through saved_notifications, check if the service has hit their daily rate limit,
and if not, call send_notification_to_queue on notification
"""
current_app.logger.info(f"Sending following email notifications to AWS: {notification_id_queue.keys()}")
current_app.logger.debug(f"Sending following email notifications to AWS: {notification_id_queue.keys()}")
# todo: fix this potential bug
# service is whatever it was set to last in the for loop above.
# at this point in the code we have a list of notifications (saved_notifications)
Expand Down Expand Up @@ -785,7 +785,7 @@ def acknowledge_receipt(notification_type: Any, process_type: Any, receipt: UUID
):
return
else:
current_app.logger.error(f"acknowledge_receipt: receipt {receipt} not found in any queue")
current_app.logger.warning(f"acknowledge_receipt: receipt {receipt} not found in any queue")


@notify_celery.task(name="seed-bounce-rate-in-redis")
Expand Down
1 change: 1 addition & 0 deletions app/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,6 +338,7 @@ class Config(object):
NEAR_DAILY_EMAIL_LIMIT_TEMPLATE_ID = "9aa60ad7-2d7f-46f0-8cbe-2bac3d4d77d8"
REACHED_DAILY_EMAIL_LIMIT_TEMPLATE_ID = "ee036547-e51b-49f1-862b-10ea982cfceb"
DAILY_EMAIL_LIMIT_UPDATED_TEMPLATE_ID = "97dade64-ea8d-460f-8a34-900b74ee5eb0"
APIKEY_REVOKE_TEMPLATE_ID = "a0a4e7b8-8a6a-4eaa-9f4e-9c3a5b2dbcf3"

# Allowed service IDs able to send HTML through their templates.
ALLOW_HTML_SERVICE_IDS: List[str] = [id.strip() for id in os.getenv("ALLOW_HTML_SERVICE_IDS", "").split(",")]
Expand Down
4 changes: 2 additions & 2 deletions app/dao/services_dao.py
Original file line number Diff line number Diff line change
Expand Up @@ -443,7 +443,7 @@ def fetch_todays_total_message_count(service_id):
def fetch_todays_total_sms_count(service_id):
midnight = get_midnight(datetime.now(tz=pytz.utc))
result = (
db.session.query(func.sum(Notification.billable_units).label("sum_billable_units"))
db.session.query(func.count(Notification.id).label("total_sms_notifications"))
.filter(
Notification.service_id == service_id,
Notification.key_type != KEY_TYPE_TEST,
Expand All @@ -452,7 +452,7 @@ def fetch_todays_total_sms_count(service_id):
)
.first()
)
return 0 if result is None or result.sum_billable_units is None else result.sum_billable_units
return 0 if result is None or result.total_sms_notifications is None else result.total_sms_notifications


def fetch_service_email_limit(service_id: uuid.UUID) -> int:
Expand Down
2 changes: 1 addition & 1 deletion app/delivery/send_to_providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def check_service_over_bounce_rate(service_id: str):
bounce_rate = bounce_rate_client.get_bounce_rate(service_id)
bounce_rate_status = bounce_rate_client.check_bounce_rate_status(service_id)
debug_data = bounce_rate_client.get_debug_data(service_id)
current_app.logger.info(
current_app.logger.debug(
f"Service id: {service_id} Bounce Rate: {bounce_rate} Bounce Status: {bounce_rate_status}, Debug Data: {debug_data}"
)
if bounce_rate_status == BounceRateStatus.CRITICAL.value:
Expand Down
4 changes: 2 additions & 2 deletions app/job/rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ def create_job(service_id):
is_test_notification = len(list(recipient_csv.get_rows())) == numberOfSimulated

if not is_test_notification:
check_sms_daily_limit(service, recipient_csv.sms_fragment_count)
check_sms_daily_limit(service, len(recipient_csv))

if template.template_type == EMAIL_TYPE:
check_email_daily_limit(service, len(list(recipient_csv.get_rows())))
Expand All @@ -183,7 +183,7 @@ def create_job(service_id):
raise InvalidRequest(errors, status_code=400)

if template.template_type == SMS_TYPE and not is_test_notification:
increment_sms_daily_count_send_warnings_if_needed(service, recipient_csv.sms_fragment_count)
increment_sms_daily_count_send_warnings_if_needed(service, len(recipient_csv))
elif template.template_type == EMAIL_TYPE:
increment_email_daily_count_send_warnings_if_needed(service, len(list(recipient_csv.get_rows())))

Expand Down
4 changes: 4 additions & 0 deletions app/service/rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,10 @@
from notifications_utils.clients.redis import (
daily_limit_cache_key,
near_daily_limit_cache_key,
near_email_daily_limit_cache_key,
near_sms_daily_limit_cache_key,
over_daily_limit_cache_key,
over_email_daily_limit_cache_key,
over_sms_daily_limit_cache_key,
)
from notifications_utils.letter_timings import letter_can_be_cancelled
Expand Down Expand Up @@ -303,6 +305,8 @@ def update_service(service_id):
redis_store.delete(daily_limit_cache_key(service_id))
redis_store.delete(near_daily_limit_cache_key(service_id))
redis_store.delete(over_daily_limit_cache_key(service_id))
redis_store.delete(near_email_daily_limit_cache_key(service_id))
redis_store.delete(over_email_daily_limit_cache_key(service_id))
if not fetched_service.restricted:
_warn_service_users_about_message_limit_changed(service_id, current_data)
if sms_limit_changed:
Expand Down
4 changes: 2 additions & 2 deletions app/service/send_notification.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def send_one_off_notification(service_id, post_data):
if template.template_type == SMS_TYPE:
is_test_notification = simulated_recipient(post_data["to"], template.template_type)
if not is_test_notification:
check_sms_daily_limit(service, template_with_content.fragment_count)
check_sms_daily_limit(service, 1)
elif template.template_type == EMAIL_TYPE and current_app.config["FF_EMAIL_DAILY_LIMIT"]:
check_email_daily_limit(service, 1) # 1 email

Expand All @@ -91,7 +91,7 @@ def send_one_off_notification(service_id, post_data):
if template.template_type == SMS_TYPE:
is_test_notification = simulated_recipient(post_data["to"], template.template_type)
if not is_test_notification:
increment_sms_daily_count_send_warnings_if_needed(service, template_with_content.fragment_count)
increment_sms_daily_count_send_warnings_if_needed(service, 1)
elif template.template_type == EMAIL_TYPE and current_app.config["FF_EMAIL_DAILY_LIMIT"]:
increment_email_daily_count_send_warnings_if_needed(service, 1) # 1 email

Expand Down
10 changes: 5 additions & 5 deletions app/sms_fragment_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,11 @@ def fetch_todays_requested_sms_count(service_id: UUID) -> int:
return fetch_todays_total_sms_count(service_id)

cache_key = sms_daily_count_cache_key(service_id)
fragment_count = redis_store.get(cache_key)
if fragment_count is None:
fragment_count = fetch_todays_total_sms_count(service_id)
redis_store.set(cache_key, fragment_count, ex=int(timedelta(hours=2).total_seconds()))
return int(fragment_count)
sms_count = redis_store.get(cache_key)
if sms_count is None:
sms_count = fetch_todays_total_sms_count(service_id)
redis_store.set(cache_key, sms_count, ex=int(timedelta(hours=2).total_seconds()))
return int(sms_count)


def increment_todays_requested_sms_count(service_id: UUID, increment_by: int):
Expand Down
Loading

0 comments on commit 5c3b50f

Please sign in to comment.