From d1d851dd79ca2d1d3fe650a74cc14f64e977518f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 21 May 2024 13:14:12 -0300 Subject: [PATCH 01/15] fix(deps): update all patch dependencies (#2104) * fix(deps): update all patch dependencies * Bump utils version * Add typing for salesforce client - Salesforce 1.12.6 introduced typing to the project so we need to conform or mypy gets upset * formatting --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: wbanks --- app/clients/salesforce/salesforce_account.py | 2 +- app/clients/salesforce/salesforce_auth.py | 6 +- app/clients/salesforce/salesforce_client.py | 8 +- app/clients/salesforce/salesforce_contact.py | 12 +- .../salesforce/salesforce_engagement.py | 46 ++-- app/clients/salesforce/salesforce_utils.py | 13 +- poetry.lock | 217 +++++++----------- pyproject.toml | 10 +- tests_cypress/package-lock.json | 154 ++++--------- 9 files changed, 185 insertions(+), 283 deletions(-) diff --git a/app/clients/salesforce/salesforce_account.py b/app/clients/salesforce/salesforce_account.py index 3002edb416..ec942b9259 100644 --- a/app/clients/salesforce/salesforce_account.py +++ b/app/clients/salesforce/salesforce_account.py @@ -32,7 +32,7 @@ def get_org_name_from_notes(organisation_notes: str, name_index: int = ORG_NOTES return organisation_notes -def get_account_id_from_name(session: Salesforce, account_name: str, generic_account_id: str) -> Optional[str]: +def get_account_id_from_name(session: Optional[Salesforce], account_name: str, generic_account_id: str) -> Optional[str]: """Returns the Account ID for the given Account Name. If no match is found, a generic Account not found ID is returned. diff --git a/app/clients/salesforce/salesforce_auth.py b/app/clients/salesforce/salesforce_auth.py index 181c6550b9..3fbcdbdf3b 100644 --- a/app/clients/salesforce/salesforce_auth.py +++ b/app/clients/salesforce/salesforce_auth.py @@ -1,3 +1,5 @@ +from typing import Optional + import requests from flask import current_app from simple_salesforce import Salesforce @@ -13,7 +15,7 @@ def send(self, *args, **kwargs): return super().send(*args, **kwargs) -def get_session(client_id: str, username: str, password: str, security_token: str, domain: str) -> Salesforce: +def get_session(client_id: str, username: str, password: str, security_token: str, domain: str) -> Optional[Salesforce]: """Return an authenticated Salesforce session Args: @@ -46,7 +48,7 @@ def get_session(client_id: str, username: str, password: str, security_token: st return session -def end_session(session: Salesforce): +def end_session(session: Optional[Salesforce]): """Logout of a Salesforce session Args: diff --git a/app/clients/salesforce/salesforce_client.py b/app/clients/salesforce/salesforce_client.py index 057cf76413..e320645df9 100644 --- a/app/clients/salesforce/salesforce_client.py +++ b/app/clients/salesforce/salesforce_client.py @@ -28,7 +28,7 @@ def init_app(self, app): # # Authentication # - def get_session(self) -> Salesforce: + def get_session(self) -> Optional[Salesforce]: """Returns an authenticated Salesforce session. Returns: @@ -36,7 +36,7 @@ def get_session(self) -> Salesforce: """ return salesforce_auth.get_session(self.client_id, self.username, self.password, self.security_token, self.domain) - def end_session(self, session: Salesforce) -> None: + def end_session(self, session: Optional[Salesforce]) -> None: """Revokes a Salesforce session. 
Args: @@ -73,7 +73,9 @@ def contact_update(self, user: User) -> None: salesforce_contact.update(session, user, user_updates) self.end_session(session) - def contact_update_account_id(self, session: Salesforce, service: Service, user: User) -> Tuple[Optional[str], Optional[str]]: + def contact_update_account_id( + self, session: Optional[Salesforce], service: Service, user: User + ) -> Tuple[Optional[str], Optional[str]]: """Updates the Account ID for the given Notify user's Salesforce Contact. The Salesforce Account ID and Contact ID are returned. diff --git a/app/clients/salesforce/salesforce_contact.py b/app/clients/salesforce/salesforce_contact.py index a8397eb445..a982c39c93 100644 --- a/app/clients/salesforce/salesforce_contact.py +++ b/app/clients/salesforce/salesforce_contact.py @@ -16,7 +16,7 @@ from app.models import User -def create(session: Salesforce, user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: +def create(session: Optional[Salesforce], user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: """Create a Salesforce Contact from the given Notify User Args: @@ -38,7 +38,7 @@ def create(session: Salesforce, user: User, field_updates: dict[str, Optional[st "Email": user.email_address, } field_values = field_default_values | field_updates - result = session.Contact.create( + result = session.Contact.create( # type: ignore field_values, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, ) @@ -50,7 +50,7 @@ def create(session: Salesforce, user: User, field_updates: dict[str, Optional[st return contact_id -def update(session: Salesforce, user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: +def update(session: Optional[Salesforce], user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: """Update a Contact's details. If the Contact does not exist, it is created. Args: @@ -67,8 +67,8 @@ def update(session: Salesforce, user: User, field_updates: dict[str, Optional[st # Existing contact, update the AccountID if contact: - result = session.Contact.update( - contact.get("Id"), field_updates, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"} + result = session.Contact.update( # type:ignore + str(contact.get("Id")), field_updates, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"} ) parse_result(result, f"Salesforce Contact update '{user.email_address}' with '{field_updates}'") contact_id = contact.get("Id") @@ -81,7 +81,7 @@ def update(session: Salesforce, user: User, field_updates: dict[str, Optional[st return contact_id -def get_contact_by_user_id(session: Salesforce, user_id: str) -> Optional[dict[str, str]]: +def get_contact_by_user_id(session: Optional[Salesforce], user_id: str) -> Optional[dict[str, str]]: """Retrieve a Salesforce Contact by their Notify user ID. If they can't be found, `None` is returned. 
diff --git a/app/clients/salesforce/salesforce_engagement.py b/app/clients/salesforce/salesforce_engagement.py index 1637311fa3..3462ceae7b 100644 --- a/app/clients/salesforce/salesforce_engagement.py +++ b/app/clients/salesforce/salesforce_engagement.py @@ -1,7 +1,7 @@ from __future__ import annotations from datetime import datetime -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any, Dict, Optional from flask import current_app from simple_salesforce import Salesforce @@ -22,7 +22,11 @@ def create( - session: Salesforce, service: Service, field_updates: dict[str, str], account_id: Optional[str], contact_id: Optional[str] + session: Optional[Salesforce], + service: Service, + field_updates: dict[str, str], + account_id: Optional[str], + contact_id: Optional[str], ) -> Optional[str]: """Create a Salesforce Engagement for the given Notify service @@ -38,7 +42,7 @@ def create( """ engagement_id = None try: - if account_id and contact_id: + if account_id and contact_id and session: # Default Engagement values, which can be overridden by passing in field_updates field_default_values = { "Name": service.name, @@ -54,7 +58,7 @@ def create( "Product_to_Add__c": ENGAGEMENT_PRODUCT, } field_values = field_default_values | field_updates - result = session.Opportunity.create( + result = session.Opportunity.create( # type: ignore engagement_maxlengths(field_values), headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, ) @@ -63,7 +67,7 @@ def create( # Create the Product association if engagement_id: - result = session.OpportunityLineItem.create( + result = session.OpportunityLineItem.create( # type: ignore { "OpportunityId": engagement_id, "PricebookEntryId": current_app.config["SALESFORCE_ENGAGEMENT_STANDARD_PRICEBOOK_ID"], @@ -76,7 +80,7 @@ def create( parse_result(result, f"Salesforce Engagement OpportunityLineItem create for service ID {service.id}") else: current_app.logger.error( - f"SF_ERR Salesforce Engagement create failed: missing Account ID '{account_id}' or Contact ID '{contact_id}' for service ID {service.id}" + f"SF_ERR Salesforce Engagement create failed: missing Account ID '{account_id}' or Contact ID '{contact_id}' for service ID {service.id} or the session is not available. '{session}'" ) except Exception as ex: current_app.logger.error(f"SF_ERR Salesforce Engagement create failed: {ex}") @@ -84,7 +88,11 @@ def create( def update( - session: Salesforce, service: Service, field_updates: dict[str, str], account_id: Optional[str], contact_id: Optional[str] + session: Optional[Salesforce], + service: Service, + field_updates: dict[str, str], + account_id: Optional[str], + contact_id: Optional[str], ) -> Optional[str]: """Update an Engagement. If the Engagement does not exist, it is created. @@ -104,8 +112,8 @@ def update( # Existing Engagement, update the stage name if engagement: - result = session.Opportunity.update( - engagement.get("Id"), + result = session.Opportunity.update( # type: ignore + str(engagement.get("Id")), engagement_maxlengths(field_updates), headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, ) @@ -120,7 +128,9 @@ def update( return engagement_id -def contact_role_add(session: Salesforce, service: Service, account_id: Optional[str], contact_id: Optional[str]) -> None: +def contact_role_add( + session: Optional[Salesforce], service: Service, account_id: Optional[str], contact_id: Optional[str] +) -> None: """Adds an Engagement ContactRole based on the provided Notify service and Contact. 
If the Engagement does not exist, it is created. @@ -136,7 +146,7 @@ def contact_role_add(session: Salesforce, service: Service, account_id: Optional try: engagement = get_engagement_by_service_id(session, str(service.id)) if engagement: - result = session.OpportunityContactRole.create( + result = session.OpportunityContactRole.create( # type: ignore {"ContactId": contact_id, "OpportunityId": engagement.get("Id")}, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, ) @@ -147,7 +157,9 @@ def contact_role_add(session: Salesforce, service: Service, account_id: Optional current_app.logger.error(f"SF_ERR Salesforce ContactRole add for {contact_id} with '{service.id}' failed: {ex}") -def contact_role_delete(session: Salesforce, service: Service, account_id: Optional[str], contact_id: Optional[str]) -> None: +def contact_role_delete( + session: Optional[Salesforce], service: Service, account_id: Optional[str], contact_id: Optional[str] +) -> None: """Deletes an Engagement ContactRole based on the provided Notify service and Salesforce Contact. If the Engagement does not exist, it is created. @@ -161,19 +173,19 @@ def contact_role_delete(session: Salesforce, service: Service, account_id: Optio None """ try: - result = {} + result: Dict[str, Any] = {} engagement = get_engagement_by_service_id(session, str(service.id)) engagement_id = engagement.get("Id") if engagement else create(session, service, {}, account_id, contact_id) engagement_contact_role = get_engagement_contact_role(session, engagement_id, contact_id) if engagement_contact_role: - result = session.OpportunityContactRole.delete(engagement_contact_role.get("Id")) + result = session.OpportunityContactRole.delete(engagement_contact_role.get("Id")) # type: ignore parse_result(result, f"Salesforce ContactRole delete for {contact_id} with '{service.id}'") except Exception as ex: current_app.logger.error(f"SF_ERR Salesforce ContactRole delete for {contact_id} with '{service.id}' failed: {ex}") -def get_engagement_by_service_id(session: Salesforce, service_id: str) -> Optional[dict[str, Any]]: +def get_engagement_by_service_id(session: Optional[Salesforce], service_id: str) -> Optional[dict[str, Any]]: """Retrieve a Salesforce Engagement by a Notify service ID Args: @@ -184,14 +196,14 @@ def get_engagement_by_service_id(session: Salesforce, service_id: str) -> Option Optional[dict[str, str]]: Salesforce Engagement details or None if can't be found """ result = None - if isinstance(service_id, str) and service_id.strip(): + if isinstance(service_id, str) and service_id.strip() and session is not None: query = f"SELECT Id, Name, ContactId, AccountId FROM Opportunity where CDS_Opportunity_Number__c = '{query_param_sanitize(service_id)}' LIMIT 1" result = query_one(session, query) return result def get_engagement_contact_role( - session: Salesforce, engagement_id: Optional[str], contact_id: Optional[str] + session: Optional[Salesforce], engagement_id: Optional[str], contact_id: Optional[str] ) -> Optional[dict[str, Any]]: """Retrieve a Salesforce Engagement ContactRole. 
diff --git a/app/clients/salesforce/salesforce_utils.py b/app/clients/salesforce/salesforce_utils.py index 9b9f270547..0cb666112a 100644 --- a/app/clients/salesforce/salesforce_utils.py +++ b/app/clients/salesforce/salesforce_utils.py @@ -26,7 +26,7 @@ def get_name_parts(full_name: str) -> dict[str, str]: } -def query_one(session: Salesforce, query: str) -> Optional[dict[str, Any]]: +def query_one(session: Optional[Salesforce], query: str) -> Optional[dict[str, Any]]: """Execute an SOQL query that expects to return a single record. Args: @@ -38,11 +38,14 @@ def query_one(session: Salesforce, query: str) -> Optional[dict[str, Any]]: """ result = None try: - results = session.query(query) - if results.get("totalSize") == 1: - result = results.get("records")[0] + if session is not None: + results = session.query(query) + if results.get("totalSize") == 1: + result = results.get("records")[0] + else: + current_app.logger.warn(f"SF_WARN Salesforce no results found for query {query}") else: - current_app.logger.warn(f"SF_WARN Salesforce no results found for query {query}") + current_app.logger.error("SF_ERR Salesforce session is None") except Exception as ex: current_app.logger.error(f"SF_ERR Salesforce query {query} failed: {ex}") return result diff --git a/poetry.lock b/poetry.lock index 5a3fc9e36d..67aa854ed1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -210,18 +210,18 @@ aiohttp = "*" [[package]] name = "awscli" -version = "1.32.89" +version = "1.32.100" description = "Universal Command Line Environment for AWS." optional = false python-versions = ">=3.8" files = [ - {file = "awscli-1.32.89-py3-none-any.whl", hash = "sha256:6928106f755312eb8126eebe317e947d64d0337d0a0f468c8ed06c724eed0286"}, - {file = "awscli-1.32.89.tar.gz", hash = "sha256:0cb9f2145b3c84e7df253ad01589e8d31412644ad83c9ffe4bd3e45ffc2dd9d2"}, + {file = "awscli-1.32.100-py3-none-any.whl", hash = "sha256:46e4a44dafeffe63980ab2cd0240aa15a4879cf5d84f210c9eb0facc05e7bf0a"}, + {file = "awscli-1.32.100.tar.gz", hash = "sha256:7bd06388d7853508f96a91291c28b0745ac0a5ac73276cb7db48478d6d3c2a70"}, ] [package.dependencies] -botocore = "1.34.89" -colorama = ">=0.2.5,<0.4.5" +botocore = "1.34.100" +colorama = ">=0.2.5,<0.4.7" docutils = ">=0.10,<0.17" PyYAML = ">=3.10,<6.1" rsa = ">=3.1.2,<4.8" @@ -371,17 +371,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.89" +version = "1.34.100" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.89-py3-none-any.whl", hash = "sha256:f9166f485d64b012d46acd212fb29a45b195a85ff66a645b05b06d9f7572af36"}, - {file = "boto3-1.34.89.tar.gz", hash = "sha256:e0940e43810fe82f5b77442c751491fcc2768af7e7c3e8c15ea158e1ca9b586c"}, + {file = "boto3-1.34.100-py3-none-any.whl", hash = "sha256:bbe2bb0dfcd92380da2a2fa2c2f586ba06c118b796380b2d0f3d0ebd103ec28d"}, + {file = "boto3-1.34.100.tar.gz", hash = "sha256:016f6d66900bb1a835dea2063f1e91fc7057dbf7fb7df8add0706f0da9492631"}, ] [package.dependencies] -botocore = ">=1.34.89,<1.35.0" +botocore = ">=1.34.100,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -390,13 +390,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.89" +version = "1.34.100" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.89-py3-none-any.whl", hash = "sha256:35205ed7db13058a3f7114c28e93058a8ff1490dfc6a5b5dff9c581c738fbf59"}, - {file = "botocore-1.34.89.tar.gz", hash = "sha256:6624b69bcdf2c5d0568b7bc9cbac13e605f370e7ea06710c61e2e2dc76831141"}, + {file = "botocore-1.34.100-py3-none-any.whl", hash = "sha256:ee516fb9e9e906d311f2a9921afaf79c594db239a5b4b626e89e6960401aad0b"}, + {file = "botocore-1.34.100.tar.gz", hash = "sha256:513bea60c6531af8e1ae1fdb2947e3ef99712f39c58f4656b5efef9cb6f75a13"}, ] [package.dependencies] @@ -2444,7 +2444,7 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "52.2.2" +version = "52.2.3" description = "Shared python code for Notification - Provides logging utils etc." optional = false python-versions = "~3.10.9" @@ -2452,9 +2452,9 @@ files = [] develop = false [package.dependencies] -awscli = "1.32.89" +awscli = "1.32.100" bleach = "6.1.0" -boto3 = "1.34.89" +boto3 = "1.34.100" cachetools = "4.2.4" certifi = "^2023.7.22" cryptography = "^42.0.3" @@ -2465,7 +2465,7 @@ Jinja2 = "^3.0.0" markupsafe = "2.1.5" mistune = "0.8.4" ordered-set = "4.1.0" -phonenumbers = "8.13.35" +phonenumbers = "8.13.36" py_w3c = "0.3.1" pypdf2 = "1.28.6" python-json-logger = "2.0.7" @@ -2479,8 +2479,8 @@ werkzeug = "2.3.7" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "52.2.2" -resolved_reference = "578b4147fe6c7a8f89241649b763f94ef24b2ad4" +reference = "52.2.3" +resolved_reference = "fc02dace174f93072345160787b2e603b74a984b" [[package]] name = "ordered-set" @@ -2518,49 +2518,15 @@ files = [ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] -[[package]] -name = "pendulum" -version = "2.1.2" -description = "Python datetimes made easy" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = 
"pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, -] - -[package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" - [[package]] name = "phonenumbers" -version = "8.13.35" +version = "8.13.36" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.35-py2.py3-none-any.whl", hash = "sha256:58286a8e617bd75f541e04313b28c36398be6d4443a778c85e9617a93c391310"}, - {file = "phonenumbers-8.13.35.tar.gz", hash = "sha256:64f061a967dcdae11e1c59f3688649e697b897110a33bb74d5a69c3e35321245"}, + {file = "phonenumbers-8.13.36-py2.py3-none-any.whl", hash = "sha256:68e06d20ae2f8fe5c7c7fd5b433f4257bc3cc747dc5196a029c7898ea449b012"}, + {file = "phonenumbers-8.13.36.tar.gz", hash = "sha256:b4e2371e35a1172aa2c91c9200b1e48e87b9355eb575768dd38058fc8d72c9ff"}, ] [[package]] @@ -2951,6 +2917,9 @@ files = [ {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, ] +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + [package.extras] crypto = ["cryptography (>=3.4.0)"] dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] @@ -3231,17 +3200,6 @@ files = [ {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - [[package]] name = "pywin32" version = "306" @@ -3606,21 +3564,20 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar [[package]] name = "simple-salesforce" -version = "1.12.5" +version = "1.12.6" description = "A basic Salesforce.com REST API client." optional = false python-versions = "*" files = [ - {file = "simple-salesforce-1.12.5.tar.gz", hash = "sha256:ef65f72438e3b215619f6835d3d4356e147adf3a7ece6896d239127dd6aefcd1"}, - {file = "simple_salesforce-1.12.5-py2.py3-none-any.whl", hash = "sha256:07029575385d04132babfd6e19c1c8068c859d616a45dab07bbf9875bdc5ab93"}, + {file = "simple-salesforce-1.12.6.tar.gz", hash = "sha256:77590606c781905f6b75430562951dd2b062438da7f55fca2b61e4cde31df15b"}, + {file = "simple_salesforce-1.12.6-py2.py3-none-any.whl", hash = "sha256:66c74bee88d09ace46e4fc9c2f6b47c0d012817a764f70a5455d6dc2c7ed635c"}, ] [package.dependencies] -cryptography = "*" more-itertools = "*" -pendulum = "*" -pyjwt = "*" +pyjwt = {version = "*", extras = ["crypto"]} requests = ">=2.22.0" +typing-extensions = "*" zeep = "*" [[package]] @@ -3646,57 +3603,57 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.51" +version = "1.4.52" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.51-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:1a09d5bd1a40d76ad90e5570530e082ddc000e1d92de495746f6257dc08f166b"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2be4e6294c53f2ec8ea36486b56390e3bcaa052bf3a9a47005687ccf376745d1"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca484ca11c65e05639ffe80f20d45e6be81fbec7683d6c9a15cd421e6e8b340"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0535d5b57d014d06ceeaeffd816bb3a6e2dddeb670222570b8c4953e2d2ea678"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af55cc207865d641a57f7044e98b08b09220da3d1b13a46f26487cc2f898a072"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-win32.whl", hash = "sha256:7af40425ac535cbda129d9915edcaa002afe35d84609fd3b9d6a8c46732e02ee"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-win_amd64.whl", hash = "sha256:8d1d7d63e5d2f4e92a39ae1e897a5d551720179bb8d1254883e7113d3826d43c"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eaeeb2464019765bc4340214fca1143081d49972864773f3f1e95dba5c7edc7d"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7deeae5071930abb3669b5185abb6c33ddfd2398f87660fafdb9e6a5fb0f3f2f"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0892e7ac8bc76da499ad3ee8de8da4d7905a3110b952e2a35a940dab1ffa550e"}, - {file = 
"SQLAlchemy-1.4.51-cp311-cp311-win32.whl", hash = "sha256:50e074aea505f4427151c286955ea025f51752fa42f9939749336672e0674c81"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-win_amd64.whl", hash = "sha256:3b0cd89a7bd03f57ae58263d0f828a072d1b440c8c2949f38f3b446148321171"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a33cb3f095e7d776ec76e79d92d83117438b6153510770fcd57b9c96f9ef623d"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cacc0b2dd7d22a918a9642fc89840a5d3cee18a0e1fe41080b1141b23b10916"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245c67c88e63f1523e9216cad6ba3107dea2d3ee19adc359597a628afcabfbcb"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-win32.whl", hash = "sha256:8e702e7489f39375601c7ea5a0bef207256828a2bc5986c65cb15cd0cf097a87"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-win_amd64.whl", hash = "sha256:0525c4905b4b52d8ccc3c203c9d7ab2a80329ffa077d4bacf31aefda7604dc65"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:1980e6eb6c9be49ea8f89889989127daafc43f0b1b6843d71efab1514973cca0"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ec7a0ed9b32afdf337172678a4a0e6419775ba4e649b66f49415615fa47efbd"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352df882088a55293f621328ec33b6ffca936ad7f23013b22520542e1ab6ad1b"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:86a22143a4001f53bf58027b044da1fb10d67b62a785fc1390b5c7f089d9838c"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c37bc677690fd33932182b85d37433845de612962ed080c3e4d92f758d1bd894"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-win32.whl", hash = "sha256:d0a83afab5e062abffcdcbcc74f9d3ba37b2385294dd0927ad65fc6ebe04e054"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-win_amd64.whl", hash = "sha256:a61184c7289146c8cff06b6b41807c6994c6d437278e72cf00ff7fe1c7a263d1"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:3f0ef620ecbab46e81035cf3dedfb412a7da35340500ba470f9ce43a1e6c423b"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c55040d8ea65414de7c47f1a23823cd9f3fad0dc93e6b6b728fee81230f817b"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ef80328e3fee2be0a1abe3fe9445d3a2e52a1282ba342d0dab6edf1fef4707"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f8cafa6f885a0ff5e39efa9325195217bb47d5929ab0051636610d24aef45ade"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f2df79a46e130235bc5e1bbef4de0583fb19d481eaa0bffa76e8347ea45ec6"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-win32.whl", hash = "sha256:f2e5b6f5cf7c18df66d082604a1d9c7a2d18f7d1dbe9514a2afaccbb51cc4fc3"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-win_amd64.whl", hash = 
"sha256:5e180fff133d21a800c4f050733d59340f40d42364fcb9d14f6a67764bdc48d2"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7d8139ca0b9f93890ab899da678816518af74312bb8cd71fb721436a93a93298"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb18549b770351b54e1ab5da37d22bc530b8bfe2ee31e22b9ebe650640d2ef12"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55e699466106d09f028ab78d3c2e1f621b5ef2c8694598242259e4515715da7c"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2ad16880ccd971ac8e570550fbdef1385e094b022d6fc85ef3ce7df400dddad3"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b97fd5bb6b7c1a64b7ac0632f7ce389b8ab362e7bd5f60654c2a418496be5d7f"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-win32.whl", hash = "sha256:cecb66492440ae8592797dd705a0cbaa6abe0555f4fa6c5f40b078bd2740fc6b"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-win_amd64.whl", hash = "sha256:39b02b645632c5fe46b8dd30755682f629ffbb62ff317ecc14c998c21b2896ff"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b03850c290c765b87102959ea53299dc9addf76ca08a06ea98383348ae205c99"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e646b19f47d655261b22df9976e572f588185279970efba3d45c377127d35349"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3cf56cc36d42908495760b223ca9c2c0f9f0002b4eddc994b24db5fcb86a9e4"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0d661cff58c91726c601cc0ee626bf167b20cc4d7941c93c5f3ac28dc34ddbea"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3823dda635988e6744d4417e13f2e2b5fe76c4bf29dd67e95f98717e1b094cad"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-win32.whl", hash = "sha256:b00cf0471888823b7a9f722c6c41eb6985cf34f077edcf62695ac4bed6ec01ee"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-win_amd64.whl", hash = "sha256:a055ba17f4675aadcda3005df2e28a86feb731fdcc865e1f6b4f209ed1225cba"}, - {file = "SQLAlchemy-1.4.51.tar.gz", hash = "sha256:e7908c2025eb18394e32d65dd02d2e37e17d733cdbe7d78231c2b6d7eb20cdb9"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, + {file = 
"SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, + {file = 
"SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, + {file = 
"SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, + {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, ] [package.dependencies] @@ -3855,13 +3812,13 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. [[package]] name = "types-boto" -version = "2.49.18.9" +version = "2.49.18.20240205" description = "Typing stubs for boto" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-boto-2.49.18.9.tar.gz", hash = "sha256:fe711d938c237be50346a1bdc2231d3170453fe734789075dd088458e4e9442d"}, - {file = "types_boto-2.49.18.9-py3-none-any.whl", hash = "sha256:b44e8aead5e34bc336a813af90fdbb9ac5bb1091de839042628163463d9948eb"}, + {file = "types-boto-2.49.18.20240205.tar.gz", hash = "sha256:6c7f3945e5759e1f8a760e2843adbeb1eea64f869f3a3070af7cfcfc25ea71bd"}, + {file = "types_boto-2.49.18.20240205-py3-none-any.whl", hash = "sha256:9873214ce37756a6145c165fb9beaf80cb4ac1df5a5967f6a0945109c8c4469a"}, ] [[package]] @@ -3913,13 +3870,13 @@ files = [ [[package]] name = "types-redis" -version = "4.6.0.20240106" +version = "4.6.0.20240425" description = "Typing stubs for redis" optional = false python-versions = ">=3.8" files = [ - {file = "types-redis-4.6.0.20240106.tar.gz", hash = "sha256:2b2fa3a78f84559616242d23f86de5f4130dfd6c3b83fb2d8ce3329e503f756e"}, - {file = "types_redis-4.6.0.20240106-py3-none-any.whl", hash = "sha256:912de6507b631934bd225cdac310b04a58def94391003ba83939e5a10e99568d"}, + {file = "types-redis-4.6.0.20240425.tar.gz", hash = "sha256:9402a10ee931d241fdfcc04592ebf7a661d7bb92a8dea631279f0d8acbcf3a22"}, + {file = "types_redis-4.6.0.20240425-py3-none-any.whl", hash = "sha256:ac5bc19e8f5997b9e76ad5d9cf15d0392d9f28cf5fc7746ea4a64b989c45c6a8"}, ] [package.dependencies] @@ -3928,13 +3885,13 @@ types-pyOpenSSL = "*" [[package]] name = "types-requests" -version = "2.31.0.20240106" +version = "2.31.0.20240406" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.31.0.20240106.tar.gz", hash = "sha256:0e1c731c17f33618ec58e022b614a1a2ecc25f7dc86800b36ef341380402c612"}, - {file = "types_requests-2.31.0.20240106-py3-none-any.whl", hash = "sha256:da997b3b6a72cc08d09f4dba9802fdbabc89104b35fe24ee588e674037689354"}, + {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, + {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, ] [package.dependencies] @@ -4255,4 +4212,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "94cd1ef6449c0b8e8dc6d90152b64b6295aa6123b5d78ceb414eb1e139110462" +content-hash = "ca9fbdd9131c2decb054fb5d50ffb7d5fe4aa88cf0f4264e17b2ef1a1486f5b8" diff --git a/pyproject.toml b/pyproject.toml index 933b1066bb..4107d6a732 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ psycopg2-binary = "2.9.9" PyJWT = "2.8.0" pytz = "2021.3" PyYAML = "6.0.1" -SQLAlchemy = "1.4.51" +SQLAlchemy = "1.4.52" cachelib = "0.10.2" newrelic = "6.10.0.165" notifications-python-client = "6.4.1" @@ -64,7 +64,7 @@ Werkzeug = "2.3.7" MarkupSafe = "2.1.5" # REVIEW: v2 is using sha512 instead of sha1 by default (in v1) itsdangerous = "2.1.2" -notifications-utils = { git = 
"https://github.com/cds-snc/notifier-utils.git", tag = "52.2.2" } +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.2.3" } # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 typing-extensions = "4.7.1" greenlet = "2.0.2" @@ -99,9 +99,9 @@ sqlalchemy-stubs = "0.4" sqlalchemy2-stubs = "0.0.2a38" networkx = "2.8.8" # not directly required, pinned by Snyk to avoid a vulnerability pytest-mock-resources = { extras = ["redis"], version = "2.9.2" } -types-boto = "2.49.18.9" +types-boto = "2.49.18.20240205" types-mock = "4.0.15.2" types-python-dateutil = "2.8.19.20240106" types-pytz = "2022.7.1.2" -types-requests = "2.31.0.20240106" -types-redis = "4.6.0.20240106" +types-requests = "2.31.0.20240406" +types-redis = "4.6.0.20240425" diff --git a/tests_cypress/package-lock.json b/tests_cypress/package-lock.json index 32ee535d12..ef4c975b59 100644 --- a/tests_cypress/package-lock.json +++ b/tests_cypress/package-lock.json @@ -186,40 +186,6 @@ "strip-ansi": "^7.0.1" } }, - "string-width-cjs": { - "version": "npm:string-width@4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - } - } - }, "strip-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", @@ -229,23 +195,6 @@ "ansi-regex": "^6.0.1" } }, - "strip-ansi-cjs": { - "version": "npm:strip-ansi@6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - } - } - }, "wrap-ansi": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", @@ -256,60 +205,6 @@ "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } - }, - "wrap-ansi-cjs": { - "version": "npm:wrap-ansi@7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "dependencies": { - 
"ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - } - } } } }, @@ -781,9 +676,9 @@ "integrity": "sha512-e8xL6YvnwRwN/1ey0aTQRbsE50rmUtT7WXPnr7ZjAUhTm/kRMKBzxmCZRqiGhy5Q4lOLOHlOkGFg2wCObvENcQ==" }, "cypress-recurse": { - "version": "1.35.2", - "resolved": "https://registry.npmjs.org/cypress-recurse/-/cypress-recurse-1.35.2.tgz", - "integrity": "sha512-G6HfxP90xa7phw8oeOX4uabxcI9gE1ktkKHShcA3nCByrkMLs56+GIJVn0A+ws1tI0PGRKBz6+V9DHS5WnZX4A==", + "version": "1.35.3", + "resolved": "https://registry.npmjs.org/cypress-recurse/-/cypress-recurse-1.35.3.tgz", + "integrity": "sha512-NbFOpEuZT4tFqAB0jQqel7WtVNDe8pvSHE2TfXvYk4pspf3wq98OC2RhhLn3bMnoCnPtY4IHO7e37c+CZ9HnMA==", "requires": { "humanize-duration": "^3.27.3" } @@ -1317,9 +1212,9 @@ "dev": true }, "humanize-duration": { - "version": "3.29.0", - "resolved": "https://registry.npmjs.org/humanize-duration/-/humanize-duration-3.29.0.tgz", - "integrity": "sha512-G5wZGwYTLaQAmYqhfK91aw3xt6wNbJW1RnWDh4qP1PvF4T/jnkjx2RVhG5kzB2PGsYGTn+oSDBQp+dMdILLxcg==" + "version": "3.32.0", + "resolved": "https://registry.npmjs.org/humanize-duration/-/humanize-duration-3.32.0.tgz", + "integrity": "sha512-6WsXYTHJr7hXKqoqf5zoWza/lANRAqGlbnZnm0cjDykbXuez1JVXOQGmq0EPB45pXYAJyueRA3S3hfhmMbrMEQ==" }, "iconv-lite": { "version": "0.6.3", @@ -1856,9 +1751,9 @@ } }, "nodemailer": { - "version": "6.9.9", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.9.tgz", - "integrity": "sha512-dexTll8zqQoVJEZPwQAKzxxtFn0qTnjdQTchoU6Re9BUUGBJiOy3YMn/0ShTW6J5M0dfQ1NeDeRTTl4oIWgQMA==" + "version": "6.9.13", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.13.tgz", + "integrity": "sha512-7o38Yogx6krdoBf3jCAqnIN4oSQFx+fMa0I7dK1D+me9kBxx12D+/33wSb+fhOCtIxvYJ+4x4IMEhmhCKfAiOA==" }, "npm-run-path": { "version": "4.0.1", @@ -2230,6 +2125,17 @@ "strip-ansi": "^6.0.1" } }, + "string-width-cjs": { + "version": "npm:string-width@4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + 
"emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, "string_decoder": { "version": "0.10.31", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", @@ -2244,6 +2150,15 @@ "ansi-regex": "^5.0.1" } }, + "strip-ansi-cjs": { + "version": "npm:strip-ansi@6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, "strip-final-newline": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", @@ -2422,6 +2337,17 @@ "strip-ansi": "^6.0.0" } }, + "wrap-ansi-cjs": { + "version": "npm:wrap-ansi@7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", From 12b9571eac64823ebf66276ba796644ab924bb48 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Wed, 22 May 2024 11:12:31 -0400 Subject: [PATCH 02/15] Use Pinpoint by default (#2173) --- .env.example | 1 + app/clients/sms/aws_pinpoint.py | 9 ++- app/clients/sms/aws_sns.py | 8 +- app/config.py | 1 + app/delivery/send_to_providers.py | 64 +++++++++++++--- .../versions/0450_enable_pinpoint_provider.py | 19 +++++ tests/app/clients/test_aws_pinpoint.py | 73 +++++++++++++++++++ tests/app/dao/test_provider_details_dao.py | 7 +- tests/app/delivery/test_send_to_providers.py | 71 +++++++++++++----- 9 files changed, 215 insertions(+), 38 deletions(-) create mode 100644 migrations/versions/0450_enable_pinpoint_provider.py create mode 100644 tests/app/clients/test_aws_pinpoint.py diff --git a/.env.example b/.env.example index cb36eefda5..6557dd4a88 100644 --- a/.env.example +++ b/.env.example @@ -22,3 +22,4 @@ CONTACT_FORM_EMAIL_ADDRESS = "" AWS_PINPOINT_SC_POOL_ID= AWS_PINPOINT_SC_TEMPLATE_IDS= +AWS_PINPOINT_DEFAULT_POOL_ID= diff --git a/app/clients/sms/aws_pinpoint.py b/app/clients/sms/aws_pinpoint.py index 37140323c0..bdb3ba7fa7 100644 --- a/app/clients/sms/aws_pinpoint.py +++ b/app/clients/sms/aws_pinpoint.py @@ -14,7 +14,6 @@ class AwsPinpointClient(SmsClient): def init_app(self, current_app, statsd_client, *args, **kwargs): self._client = boto3.client("pinpoint-sms-voice-v2", region_name="ca-central-1") super(AwsPinpointClient, self).__init__(*args, **kwargs) - # super(SmsClient, self).__init__(*args, **kwargs) self.current_app = current_app self.name = "pinpoint" self.statsd_client = statsd_client @@ -22,11 +21,15 @@ def init_app(self, current_app, statsd_client, *args, **kwargs): def get_name(self): return self.name - def send_sms(self, to, content, reference, multi=True, sender=None): - pool_id = self.current_app.config["AWS_PINPOINT_SC_POOL_ID"] + def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None): messageType = "TRANSACTIONAL" matched = False + if template_id is not None and str(template_id) in self.current_app.config["AWS_PINPOINT_SC_TEMPLATE_IDS"]: + pool_id = self.current_app.config["AWS_PINPOINT_SC_POOL_ID"] + else: + pool_id = self.current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"] + for match in 
phonenumbers.PhoneNumberMatcher(to, "US"): matched = True to = phonenumbers.format_number(match.number, phonenumbers.PhoneNumberFormat.E164) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index cf6fe3e914..4847754d72 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -2,7 +2,6 @@ from time import monotonic import boto3 -import botocore import phonenumbers from notifications_utils.statsd_decorators import statsd @@ -27,7 +26,7 @@ def get_name(self): return self.name @statsd(namespace="clients.sns") - def send_sms(self, to, content, reference, multi=True, sender=None): + def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None): matched = False for match in phonenumbers.PhoneNumberMatcher(to, "US"): @@ -66,12 +65,9 @@ def send_sms(self, to, content, reference, multi=True, sender=None): try: start_time = monotonic() response = client.publish(PhoneNumber=to, Message=content, MessageAttributes=attributes) - except botocore.exceptions.ClientError as e: - self.statsd_client.incr("clients.sns.error") - raise str(e) except Exception as e: self.statsd_client.incr("clients.sns.error") - raise str(e) + raise e finally: elapsed_time = monotonic() - start_time self.current_app.logger.info("AWS SNS request finished in {}".format(elapsed_time)) diff --git a/app/config.py b/app/config.py index fa8e0e389d..aab8422f27 100644 --- a/app/config.py +++ b/app/config.py @@ -267,6 +267,7 @@ class Config(object): AWS_SES_SECRET_KEY = os.getenv("AWS_SES_SECRET_KEY") AWS_PINPOINT_REGION = os.getenv("AWS_PINPOINT_REGION", "us-west-2") AWS_PINPOINT_SC_POOL_ID = os.getenv("AWS_PINPOINT_SC_POOL_ID", None) + AWS_PINPOINT_DEFAULT_POOL_ID = os.getenv("AWS_PINPOINT_DEFAULT_POOL_ID", None) AWS_PINPOINT_CONFIGURATION_SET_NAME = os.getenv("AWS_PINPOINT_CONFIGURATION_SET_NAME", "pinpoint-configuration") AWS_PINPOINT_SC_TEMPLATE_IDS = env.list("AWS_PINPOINT_SC_TEMPLATE_IDS", []) AWS_US_TOLL_FREE_NUMBER = os.getenv("AWS_US_TOLL_FREE_NUMBER") diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index c291bbd16a..c7de7a32c2 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -2,9 +2,10 @@ import os import re from datetime import datetime -from typing import Dict +from typing import Any, Dict, Optional from uuid import UUID +import phonenumbers from flask import current_app from notifications_utils.recipients import ( validate_and_format_email_address, @@ -48,6 +49,7 @@ NOTIFICATION_VIRUS_SCAN_FAILED, PINPOINT_PROVIDER, SMS_TYPE, + SNS_PROVIDER, BounceRateStatus, Notification, Service, @@ -67,9 +69,9 @@ def send_sms_to_provider(notification): provider = provider_to_use( SMS_TYPE, notification.id, + notification.to, notification.international, notification.reply_to_text, - template_id=notification.template_id, ) template_dict = dao_get_template_by_id(notification.template_id, notification.template_version).__dict__ @@ -105,6 +107,7 @@ def send_sms_to_provider(notification): content=str(template), reference=str(notification.id), sender=notification.reply_to_text, + template_id=notification.template_id, ) except Exception as e: notification.billable_units = template.fragment_count @@ -336,16 +339,55 @@ def update_notification_to_sending(notification, provider): dao_update_notification(notification) -def provider_to_use(notification_type, notification_id, international=False, sender=None, template_id=None): - # Temporary redirect setup for template IDs that are meant for the short code usage. 
- if notification_type == SMS_TYPE and template_id is not None and str(template_id) in Config.AWS_PINPOINT_SC_TEMPLATE_IDS: - return clients.get_client_by_name_and_type("pinpoint", SMS_TYPE) +def provider_to_use( + notification_type: str, + notification_id: UUID, + to: Optional[str] = None, + international: bool = False, + sender: Optional[str] = None, +) -> Any: + """ + Get the provider to use for sending the notification. + SMS that are being sent with a dedicated number or to a US number should not use Pinpoint. + + Args: + notification_type (str): SMS or EMAIL. + notification_id (UUID): id of notification. Just used for logging. + to (str, optional): recipient. Defaults to None. + international (bool, optional): Recipient is international. Defaults to False. + sender (str, optional): reply_to_text to use. Defaults to None. + + Raises: + Exception: No active providers. - active_providers_in_order = [ - p - for p in get_provider_details_by_notification_type(notification_type, international) - if p.active and p.identifier != PINPOINT_PROVIDER - ] + Returns: + provider: Provider to use to send the notification. + """ + + has_dedicated_number = sender is not None and sender.startswith("+1") + sending_to_us_number = False + if to is not None: + match = next(iter(phonenumbers.PhoneNumberMatcher(to, "US")), None) + if match and phonenumbers.region_code_for_number(match.number) == "US": + sending_to_us_number = True + + if ( + has_dedicated_number + or sending_to_us_number + or current_app.config["AWS_PINPOINT_SC_POOL_ID"] is None + or current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"] is None + ): + active_providers_in_order = [ + p + for p in get_provider_details_by_notification_type(notification_type, international) + if p.active and p.identifier != PINPOINT_PROVIDER + ] + else: + active_providers_in_order = [ + p + for p in get_provider_details_by_notification_type(notification_type, international) + if p.active and p.identifier != SNS_PROVIDER + ] if not active_providers_in_order: current_app.logger.error("{} {} failed as no active providers".format(notification_type, notification_id)) diff --git a/migrations/versions/0450_enable_pinpoint_provider.py b/migrations/versions/0450_enable_pinpoint_provider.py new file mode 100644 index 0000000000..0c2c8247dd --- /dev/null +++ b/migrations/versions/0450_enable_pinpoint_provider.py @@ -0,0 +1,19 @@ +""" + +Revision ID: 0450_enable_pinpoint_provider +Revises: 0449_update_magic_link_auth +Create Date: 2021-01-08 09:03:00 .214680 + +""" +from alembic import op + +revision = "0450_enable_pinpoint_provider" +down_revision = "0449_update_magic_link_auth" + + +def upgrade(): + op.execute("UPDATE provider_details set active=true where identifier in ('pinpoint');") + + +def downgrade(): + op.execute("UPDATE provider_details set active=false where identifier in ('pinpoint');") diff --git a/tests/app/clients/test_aws_pinpoint.py b/tests/app/clients/test_aws_pinpoint.py new file mode 100644 index 0000000000..ad7546d1ad --- /dev/null +++ b/tests/app/clients/test_aws_pinpoint.py @@ -0,0 +1,73 @@ +import pytest + +from app import aws_pinpoint_client +from tests.conftest import set_config_values + + +@pytest.mark.serial +def test_send_sms_sends_to_default_pool(notify_api, mocker, sample_template): + boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True) + mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True) + to = "6135555555" + content = "foo" + reference = "ref" + + with set_config_values( + notify_api, + { + 
"AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [], + }, + ): + aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id) + + boto_mock.send_text_message.assert_called_once_with( + DestinationPhoneNumber="+16135555555", + OriginationIdentity="default_pool_id", + MessageBody=content, + MessageType="TRANSACTIONAL", + ConfigurationSetName="config_set_name", + ) + + +@pytest.mark.serial +def test_send_sms_sends_to_shortcode_pool(notify_api, mocker, sample_template): + boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True) + mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True) + to = "6135555555" + content = "foo" + reference = "ref" + + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sample_template.id)], + }, + ): + aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id) + + boto_mock.send_text_message.assert_called_once_with( + DestinationPhoneNumber="+16135555555", + OriginationIdentity="sc_pool_id", + MessageBody=content, + MessageType="TRANSACTIONAL", + ConfigurationSetName="config_set_name", + ) + + +def test_send_sms_returns_raises_error_if_there_is_no_valid_number_is_found(notify_api, mocker): + mocker.patch.object(aws_pinpoint_client, "_client", create=True) + mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True) + + to = "" + content = reference = "foo" + + with pytest.raises(ValueError) as excinfo: + aws_pinpoint_client.send_sms(to, content, reference) + + assert "No valid numbers found for SMS delivery" in str(excinfo.value) diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py index 5b8b8e5348..6acce65192 100644 --- a/tests/app/dao/test_provider_details_dao.py +++ b/tests/app/dao/test_provider_details_dao.py @@ -241,9 +241,14 @@ def test_get_sms_provider_with_equal_priority_returns_provider( def test_get_current_sms_provider_returns_active_only(restore_provider_details): + # Note that we currently have two active sms providers: sns and pinpoint. 
current_provider = get_current_provider("sms") current_provider.active = False dao_update_provider_details(current_provider) + current_provider = get_current_provider("sms") + current_provider.active = False + dao_update_provider_details(current_provider) + new_current_provider = get_current_provider("sms") assert new_current_provider is None @@ -308,5 +313,5 @@ def test_dao_get_provider_stats(notify_db_session): assert result[5].identifier == "pinpoint" assert result[5].notification_type == "sms" assert result[5].supports_international is False - assert result[5].active is False + assert result[5].active is True assert result[5].current_month_billable_sms == 0 diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index ea91e1a503..a8637afdfe 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -51,6 +51,53 @@ from tests.conftest import set_config_values +class TestProviderToUse: + def test_should_use_pinpoint_for_sms_by_default(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234") + assert provider.name == "pinpoint" + + def test_should_use_sns_for_sms_if_dedicated_number(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234", False, "+12345678901") + assert provider.name == "sns" + + def test_should_use_sns_for_sms_if_sending_to_the_US(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+17065551234") + assert provider.name == "sns" + + @pytest.mark.parametrize("sc_pool_id, default_pool_id", [(None, "default_pool_id"), ("sc_pool_id", None)]) + def test_should_use_sns_if_pinpoint_not_configured(self, restore_provider_details, notify_api, sc_pool_id, default_pool_id): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": sc_pool_id, + "AWS_PINPOINT_DEFAULT_POOL_ID": default_pool_id, + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234") + assert provider.name == "sns" + + @pytest.mark.skip(reason="Currently using only 1 SMS provider") def test_should_return_highest_priority_active_provider(restore_provider_details): providers = provider_details_dao.get_provider_details_by_notification_type("sms") @@ -84,21 +131,6 @@ def test_should_return_highest_priority_active_provider(restore_provider_details assert send_to_providers.provider_to_use("sms", "1234").name == first.identifier -def test_provider_to_use(restore_provider_details): - providers = provider_details_dao.get_provider_details_by_notification_type("sms") - first = providers[0] - - assert first.identifier == "sns" - - # provider is still SNS if SMS and sender is set - provider = send_to_providers.provider_to_use("sms", "1234", False, "+12345678901") - assert first.identifier == provider.name - - # provider is highest priority sms provider if sender is not set - provider = send_to_providers.provider_to_use("sms", "1234", False) - assert 
first.identifier == provider.name - - def test_should_send_personalised_template_to_correct_sms_provider_and_persist(sample_sms_template_with_html, mocker): db_notification = save_notification( create_notification( @@ -120,6 +152,7 @@ def test_should_send_personalised_template_to_correct_sms_provider_and_persist(s content="Sample service: Hello Jo\nHere is some HTML & entities", reference=str(db_notification.id), sender=current_app.config["FROM_NUMBER"], + template_id=sample_sms_template_with_html.id, ) notification = Notification.query.filter_by(id=db_notification.id).one() @@ -338,6 +371,7 @@ def test_send_sms_should_use_template_version_from_notification_not_latest(sampl content="Sample service: This is a template:\nwith a newline", reference=str(db_notification.id), sender=current_app.config["FROM_NUMBER"], + template_id=sample_template.id, ) persisted_notification = notifications_dao.get_notification_by_id(db_notification.id) @@ -416,7 +450,7 @@ def test_should_send_sms_with_downgraded_content(notify_db_session, mocker): send_to_providers.send_sms_to_provider(db_notification) - aws_sns_client.send_sms.assert_called_once_with(to=ANY, content=gsm_message, reference=ANY, sender=ANY) + aws_sns_client.send_sms.assert_called_once_with(to=ANY, content=gsm_message, reference=ANY, sender=ANY, template_id=ANY) def test_send_sms_should_use_service_sms_sender(sample_service, sample_template, mocker): @@ -429,7 +463,9 @@ def test_send_sms_should_use_service_sms_sender(sample_service, sample_template, db_notification, ) - app.aws_sns_client.send_sms.assert_called_once_with(to=ANY, content=ANY, reference=ANY, sender=sms_sender.sms_sender) + app.aws_sns_client.send_sms.assert_called_once_with( + to=ANY, content=ANY, reference=ANY, sender=sms_sender.sms_sender, template_id=ANY + ) @pytest.mark.parametrize("research_mode,key_type", [(True, KEY_TYPE_NORMAL), (False, KEY_TYPE_TEST)]) @@ -800,6 +836,7 @@ def test_should_handle_sms_sender_and_prefix_message( sender=expected_sender, to=ANY, reference=ANY, + template_id=ANY, ) From 6919e3c739d243f7b89ad9e8f86999486b5504eb Mon Sep 17 00:00:00 2001 From: William B <7444334+whabanks@users.noreply.github.com> Date: Wed, 22 May 2024 16:10:17 -0400 Subject: [PATCH 03/15] Reinstate bulk sms limit (#2169) * Reinstate "Add error message for rows_with_combined_variable_content_too_long" (#2164)" This reverts commit 5b29366eeb563c81f7306c68ad41b9a0e7802bf7. 
* Reinstate bulk sms limit validation - Added an additional check to ensure that we only return an 4xx response if the template type is SMS * Bump utils version * Refresh lock file --- app/v2/notifications/post_notifications.py | 7 +++++++ poetry.lock | 8 ++++---- pyproject.toml | 2 +- tests/app/celery/test_tasks.py | 14 +++++++++----- .../v2/notifications/test_post_notifications.py | 4 ++-- 5 files changed, 23 insertions(+), 12 deletions(-) diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index b1fc62e447..d8edf8627b 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -7,6 +7,7 @@ import werkzeug from flask import abort, current_app, jsonify, request +from notifications_utils import SMS_CHAR_COUNT_LIMIT from notifications_utils.recipients import ( RecipientCSV, try_validate_and_format_phone_number, @@ -705,6 +706,12 @@ def check_for_csv_errors(recipient_csv, max_rows, remaining_messages): message=f"You cannot send to these recipients {explanation}", status_code=400, ) + if recipient_csv.template_type == SMS_TYPE and any(recipient_csv.rows_with_combined_variable_content_too_long): + raise BadRequestError( + message=f"Row {next(recipient_csv.rows_with_combined_variable_content_too_long).index + 1} - has a character count greater than {SMS_CHAR_COUNT_LIMIT} characters. Some messages may be too long due to custom content.", + status_code=400, + ) + if recipient_csv.rows_with_errors: def row_error(row): diff --git a/poetry.lock b/poetry.lock index 67aa854ed1..8209d2ed6f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2444,7 +2444,7 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "52.2.3" +version = "52.2.4" description = "Shared python code for Notification - Provides logging utils etc." 
optional = false python-versions = "~3.10.9" @@ -2479,8 +2479,8 @@ werkzeug = "2.3.7" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "52.2.3" -resolved_reference = "fc02dace174f93072345160787b2e603b74a984b" +reference = "52.2.4" +resolved_reference = "1e2c279333ee1b86671b82d8f562bb3e98446500" [[package]] name = "ordered-set" @@ -4212,4 +4212,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "ca9fbdd9131c2decb054fb5d50ffb7d5fe4aa88cf0f4264e17b2ef1a1486f5b8" +content-hash = "62653f4a581d32ac1678c8454f1100320096e8166aca1599cffd6fd3f72cfb4b" diff --git a/pyproject.toml b/pyproject.toml index 4107d6a732..9844f1c756 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ Werkzeug = "2.3.7" MarkupSafe = "2.1.5" # REVIEW: v2 is using sha512 instead of sha1 by default (in v1) itsdangerous = "2.1.2" -notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.2.3" } +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.2.4" } # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 typing-extensions = "4.7.1" greenlet = "2.0.2" diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index a2f0c367a7..aeeb6c8c76 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -2,7 +2,7 @@ import uuid from datetime import datetime, timedelta from unittest import mock -from unittest.mock import Mock, call +from unittest.mock import MagicMock, Mock, call import pytest import requests_mock @@ -891,9 +891,10 @@ def test_process_rows_sends_save_task( mocker.patch("app.celery.tasks.create_uuid", return_value="noti_uuid") task_mock = mocker.patch("app.celery.tasks.{}".format(expected_function)) signer_mock = mocker.patch("app.celery.tasks.signer_notification.sign") - template = Mock(id="template_id", template_type=template_type, process_type=NORMAL) + template = MagicMock(id="template_id", template_type=template_type, process_type=NORMAL) job = Mock(id="job_id", template_version="temp_vers", notification_count=1, api_key_id=api_key_id, sender_id=sender_id) service = Mock(id="service_id", research_mode=research_mode) + template.__len__.return_value = 1 process_rows( [ @@ -950,10 +951,11 @@ def test_should_redirect_email_job_to_queue_depending_on_csv_threshold( ): mock_save_email = mocker.patch("app.celery.tasks.save_emails") - template = Mock(id=1, template_type=EMAIL_TYPE, process_type=template_process_type) + template = MagicMock(id=1, template_type=EMAIL_TYPE, process_type=template_process_type) api_key = Mock(id=1, key_type=KEY_TYPE_NORMAL) job = Mock(id=1, template_version="temp_vers", notification_count=1, api_key=api_key) service = Mock(id=1, research_mode=False) + template.__len__.return_value = 1 row = next( RecipientCSV( @@ -994,10 +996,11 @@ def test_should_redirect_sms_job_to_queue_depending_on_csv_threshold( ): mock_save_sms = mocker.patch("app.celery.tasks.save_smss") - template = Mock(id=1, template_type=SMS_TYPE, process_type=template_process_type) + template = MagicMock(id=1, template_type=SMS_TYPE, process_type=template_process_type) api_key = Mock(id=1, key_type=KEY_TYPE_NORMAL) job = Mock(id=1, template_version="temp_vers", notification_count=1, api_key=api_key) service = Mock(id=1, research_mode=False) + template.__len__.return_value = 1 row = next( RecipientCSV( @@ -1066,7 +1069,8 @@ def test_process_rows_works_without_key_type( 
mocker.patch("app.celery.tasks.create_uuid", return_value="noti_uuid") task_mock = mocker.patch("app.celery.tasks.{}".format(expected_function)) signer_mock = mocker.patch("app.celery.tasks.signer_notification.sign") - template = Mock(id="template_id", template_type=template_type, process_type=NORMAL) + template = MagicMock(id="template_id", template_type=template_type, process_type=NORMAL) + template.__len__.return_value = 1 api_key = {} job = Mock( id="job_id", diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py index 93b81c47cd..cc33c4d527 100644 --- a/tests/app/v2/notifications/test_post_notifications.py +++ b/tests/app/v2/notifications/test_post_notifications.py @@ -2523,7 +2523,7 @@ def test_post_bulk_with_too_large_sms_fails(self, client, notify_db, notify_db_s mocker.patch("app.v2.notifications.post_notifications.create_bulk_job", return_value=str(uuid.uuid4())) service = create_service(sms_daily_limit=10, message_limit=100) - template = create_sample_template(notify_db, notify_db_session, service=service, template_type="sms", content="a" * 612) + template = create_sample_template(notify_db, notify_db_session, service=service, template_type="sms", content="a" * 613) data = { "name": "job_name", "template_id": template.id, @@ -2574,7 +2574,7 @@ def test_post_bulk_with_too_large_sms_fail_and_shows_correct_row( ) assert response.status_code == 400 assert "has a character count greater than" in str(response.data) - assert "row #{}".format(failure_row) in str(response.data) + assert "Row {}".format(failure_row) in str(response.data) class TestBatchPriorityLanes: From dccea535d4ab7d882d28d7c9b1d85c0a52089815 Mon Sep 17 00:00:00 2001 From: Jimmy Royer Date: Thu, 23 May 2024 17:14:38 -0400 Subject: [PATCH 04/15] Rework callback logging (#2078) * Decoupled scan malware code + lowering retry period for high priority emails * Extract common email retry handling logic into its own function * Cleaned up import * Forgot to provide default value to optional fn arg * Fixed test import * Isolated retry task param builder in a class * Cleaned up import * Fixed moved refs * Trying a different strategy to fix circular import * Fixing another bad import ref * Introducing celery utils module instead of using celery root one * Cover edge cases + modified tests * Formatting * Sort imports * Make notification_process_type param optional * Fixed edge case when template not associated with notification obj * Fixing params order * Fixing regression tests * More tests * Added null protection against a potential NPE * Formatting * Fix imports * Moved logging for callback prior to actual call * Format service_callback_tasks.py --- app/celery/service_callback_tasks.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/app/celery/service_callback_tasks.py b/app/celery/service_callback_tasks.py index af3d51e3b2..9296958f85 100644 --- a/app/celery/service_callback_tasks.py +++ b/app/celery/service_callback_tasks.py @@ -59,6 +59,7 @@ def send_complaint_to_service(self, complaint_data): def _send_data_to_service_callback_api(self, data, service_callback_url, token, function_name): notification_id = data["notification_id"] if "notification_id" in data else data["id"] try: + current_app.logger.info("{} sending {} to {}".format(function_name, notification_id, service_callback_url)) response = request( method="POST", url=service_callback_url, @@ -69,9 +70,11 @@ def _send_data_to_service_callback_api(self, data, service_callback_url, token, }, 
timeout=5, ) + current_app.logger.info( f"{function_name} sending {notification_id} to {service_callback_url}, response {response.status_code}" ) + response.raise_for_status() except RequestException as e: current_app.logger.warning( From f85df046a34db5f76b4c1c2029640adfc1845c7f Mon Sep 17 00:00:00 2001 From: Mike Pond <32133001+P0NDER0SA@users.noreply.github.com> Date: Mon, 27 May 2024 10:49:41 -0400 Subject: [PATCH 05/15] Migration Creation of DB Users/roles (#2179) * Adding a db user creation to the migrations * Just removing redundant code * Update 0451_create_db_users.py removing the test database logic and creating super role if it doesn't exist * Update 0451_create_db_users.py removing unnecessary code! * Update 0451_create_db_users.py text change * Update 0451_create_db_users.py formatting --- migrations/versions/0451_create_db_users.py | 39 +++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 migrations/versions/0451_create_db_users.py diff --git a/migrations/versions/0451_create_db_users.py b/migrations/versions/0451_create_db_users.py new file mode 100644 index 0000000000..2d7a812db0 --- /dev/null +++ b/migrations/versions/0451_create_db_users.py @@ -0,0 +1,39 @@ +""" + +Revision ID: 0451_create_db_users +Revises: 0450_enable_pinpoint_provider +Create Date: 2024-05-23 12:00:00 + +""" +from alembic import op + +revision = "0451_create_db_users" +down_revision = "0450_enable_pinpoint_provider" + +super_role = "rds_superuser" +roles = ["app_db_user", "quicksight_db_user"] + + +def upgrade(): + create_role_if_not_exist(super_role) + for role in roles: + create_role_if_not_exist(role) + op.execute(f"GRANT {role} TO {super_role} WITH ADMIN OPTION;") + + +def create_role_if_not_exist(role): + """ + Makes sure the expected user exists in the database before performing the GRANT USER operation. + If the user already exists, nothing happens. This is needed so that the migrations can be + run on localhost where the users do not exist. + """ + op.execute( + f""" + DO $$ + BEGIN + CREATE ROLE {role}; + EXCEPTION WHEN duplicate_object THEN RAISE NOTICE '%, skipping', SQLERRM USING ERRCODE = SQLSTATE; + END + $$; + """ + ) From de2cc87f3920b64d1b1d87d8de13c107773dc34b Mon Sep 17 00:00:00 2001 From: Pat Heard Date: Mon, 27 May 2024 12:54:52 -0400 Subject: [PATCH 06/15] feat: add migration to set the pgAudit config (#2172) Add migration to disable pgAudit on the app_db_user and rdsproxyadmin user. This is being done to save costs and reduce the logging pressure on the database instances. --- .../versions/0452_set_pgaudit_config.py | 53 +++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 migrations/versions/0452_set_pgaudit_config.py diff --git a/migrations/versions/0452_set_pgaudit_config.py b/migrations/versions/0452_set_pgaudit_config.py new file mode 100644 index 0000000000..88f0e87b8e --- /dev/null +++ b/migrations/versions/0452_set_pgaudit_config.py @@ -0,0 +1,53 @@ +""" + +Revision ID: 0452_set_pgaudit_config +Revises: 0451_create_db_users +Create Date: 2024-05-27 12:00:00 + +""" +from alembic import op + +revision = "0452_set_pgaudit_config" +down_revision = "0451_create_db_users" + +users = ["app_db_user", "rdsproxyadmin"] +database_name = op.get_bind().engine.url.database # database name that the migration is being run on + + +def upgrade(): + # Skip this migration in the test database as there are multiple test databases that are created. 
+ # This leads to a race condition attempting to alter the same users multiple times and causes + # sporadic unit test failures. + if "test_notification_api" in database_name: + return + + for user in users: + create_user_if_not_exists(user) + op.execute(f"ALTER USER {user} SET pgaudit.log TO 'NONE'") + + +def downgrade(): + if "test_notification_api" in database_name: + return + + # Reset the pgaudit.log setting + for user in users: + op.execute(f"ALTER USER {user} RESET pgaudit.log") + + +def create_user_if_not_exists(user): + """ + Makes sure the expected user exists in the database before performing the ALTER USER operation. + If the user already exists, nothing happens. This is needed so that the migrations can be + run on localhost where the users do not exist. + """ + op.execute( + f""" + DO $$ + BEGIN + CREATE USER {user}; + EXCEPTION WHEN duplicate_object THEN RAISE NOTICE '%, skipping', SQLERRM USING ERRCODE = SQLSTATE; + END + $$; + """ + ) From 43d07f084c67e2131126a9b2b025072e7f07a6a2 Mon Sep 17 00:00:00 2001 From: Mike Pond <32133001+P0NDER0SA@users.noreply.github.com> Date: Tue, 28 May 2024 10:48:02 -0400 Subject: [PATCH 07/15] fixing bug for role grant (#2180) * Update 0451_create_db_users.py fixing bug for role grant * Update migrations/versions/0451_create_db_users.py Co-authored-by: Pat Heard --------- Co-authored-by: Pat Heard --- migrations/versions/0451_create_db_users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/migrations/versions/0451_create_db_users.py b/migrations/versions/0451_create_db_users.py index 2d7a812db0..c16c162db5 100644 --- a/migrations/versions/0451_create_db_users.py +++ b/migrations/versions/0451_create_db_users.py @@ -18,7 +18,7 @@ def upgrade(): create_role_if_not_exist(super_role) for role in roles: create_role_if_not_exist(role) - op.execute(f"GRANT {role} TO {super_role} WITH ADMIN OPTION;") + op.execute(f"GRANT {super_role} TO {role} WITH ADMIN OPTION;") def create_role_if_not_exist(role): From c0d9615f6da2b42c427d39ee9da282890dbcbe03 Mon Sep 17 00:00:00 2001 From: "sre-read-write[bot]" <92993749+sre-read-write[bot]@users.noreply.github.com> Date: Fri, 31 May 2024 17:10:50 -0400 Subject: [PATCH 08/15] chore: synced file(s) with cds-snc/site-reliability-engineering (#2181) * chore: synced local '.github/workflows/s3-backup.yml' with remote 'tools/sre_file_sync/s3-backup.yml' * chore: synced local '.github/workflows/export_github_data.yml' with remote 'tools/sre_file_sync/export_github_data.yml' * chore: synced local '.github/workflows/ossf-scorecard.yml' with remote 'tools/sre_file_sync/ossf-scorecard.yml' --------- Co-authored-by: sre-read-write[bot] <92993749+sre-read-write[bot]@users.noreply.github.com> --- .github/workflows/export_github_data.yml | 2 +- .github/workflows/ossf-scorecard.yml | 4 ++-- .github/workflows/s3-backup.yml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/export_github_data.yml b/.github/workflows/export_github_data.yml index d12fdc1360..51ccbcb18b 100644 --- a/.github/workflows/export_github_data.yml +++ b/.github/workflows/export_github_data.yml @@ -14,7 +14,7 @@ jobs: DNS_PROXY_FORWARDTOSENTINEL: "true" DNS_PROXY_LOGANALYTICSWORKSPACEID: ${{ secrets.LOG_ANALYTICS_WORKSPACE_ID }} DNS_PROXY_LOGANALYTICSSHAREDKEY: ${{ secrets.LOG_ANALYTICS_WORKSPACE_KEY }} - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - name: Export Data uses: 
cds-snc/github-repository-metadata-exporter@main with: diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml index 9102cfe9ee..437124670c 100644 --- a/.github/workflows/ossf-scorecard.yml +++ b/.github/workflows/ossf-scorecard.yml @@ -20,12 +20,12 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 with: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@50aaf84fb1a9f22255cb8bfb1729f4dd085c838c + uses: ossf/scorecard-action@c64f0a7231aa68a6849c2b65bf16af3daa23d3e6 with: results_file: ossf-results.json results_format: json diff --git a/.github/workflows/s3-backup.yml b/.github/workflows/s3-backup.yml index 5262e9bf97..b19055191c 100644 --- a/.github/workflows/s3-backup.yml +++ b/.github/workflows/s3-backup.yml @@ -10,7 +10,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 with: fetch-depth: 0 # retrieve all history From ee480853a02269c2e4af9d780e2310cace28d427 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Mon, 3 Jun 2024 12:58:12 -0400 Subject: [PATCH 09/15] send with sc pool but sns by default (#2184) Co-authored-by: Ben Larabie --- app/delivery/send_to_providers.py | 13 +++++++--- tests/app/delivery/test_send_to_providers.py | 27 +++++++++++++++++++- 2 files changed, 36 insertions(+), 4 deletions(-) diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index c7de7a32c2..5f74f76033 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -72,6 +72,7 @@ def send_sms_to_provider(notification): notification.to, notification.international, notification.reply_to_text, + template_id=notification.template_id, ) template_dict = dao_get_template_by_id(notification.template_id, notification.template_version).__dict__ @@ -345,6 +346,7 @@ def provider_to_use( to: Optional[str] = None, international: bool = False, sender: Optional[str] = None, + template_id: Optional[UUID] = None, ) -> Any: """ Get the provider to use for sending the notification. @@ -356,6 +358,7 @@ def provider_to_use( to (str, optional): recipient. Defaults to None. international (bool, optional): Recipient is international. Defaults to False. sender (str, optional): reply_to_text to use. Defaults to None. + template_id (str, optional): template_id to use. Defaults to None. Raises: Exception: No active providers. 
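For context on the routing rule documented above: the US-number check can be reproduced standalone with the phonenumbers library. The following is a minimal sketch mirroring the check in the next hunk, not the patch's own code; the sample numbers are the same ones the patch's tests use.

import phonenumbers

def sending_to_us_number(to: str) -> bool:
    # Take the first number libphonenumber can parse out of the string,
    # using a NANP ("US") hint, then resolve which region actually owns it.
    match = next(iter(phonenumbers.PhoneNumberMatcher(to, "US")), None)
    return match is not None and phonenumbers.region_code_for_number(match.number) == "US"

print(sending_to_us_number("+17065551234"))  # True: US number, so Pinpoint is skipped
print(sending_to_us_number("+16135551234"))  # False: Canadian number, Pinpoint-eligible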
@@ -371,12 +374,16 @@ def provider_to_use( if match and phonenumbers.region_code_for_number(match.number) == "US": sending_to_us_number = True - if ( + using_sc_pool_template = template_id is not None and str(template_id) in current_app.config["AWS_PINPOINT_SC_TEMPLATE_IDS"] + + do_not_use_pinpoint = ( has_dedicated_number or sending_to_us_number or current_app.config["AWS_PINPOINT_SC_POOL_ID"] is None - or current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"] is None - ): + or (current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"] is None and not using_sc_pool_template) + ) + + if do_not_use_pinpoint: active_providers_in_order = [ p for p in get_provider_details_by_notification_type(notification_type, international) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index a8637afdfe..6b7dbf2210 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -52,7 +52,7 @@ class TestProviderToUse: - def test_should_use_pinpoint_for_sms_by_default(self, restore_provider_details, notify_api): + def test_should_use_pinpoint_for_sms_by_default_if_configured(self, restore_provider_details, notify_api): with set_config_values( notify_api, { @@ -63,6 +63,31 @@ def test_should_use_pinpoint_for_sms_by_default(self, restore_provider_details, provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234") assert provider.name == "pinpoint" + def test_should_use_sns_for_sms_by_default_if_partially_configured(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": None, + "AWS_PINPOINT_SC_TEMPLATE_IDS": [], + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234", template_id=uuid.uuid4()) + assert provider.name == "sns" + + def test_should_use_pinpoint_for_sms_for_sc_template_if_sc_pool_configured(self, restore_provider_details, notify_api): + sc_template = uuid.uuid4() + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": None, + "AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sc_template)], + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234", template_id=sc_template) + assert provider.name == "pinpoint" + def test_should_use_sns_for_sms_if_dedicated_number(self, restore_provider_details, notify_api): with set_config_values( notify_api, From af825ef0ead093dcecada0a1b53ac134159dc0e3 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Tue, 4 Jun 2024 13:23:43 -0400 Subject: [PATCH 10/15] fix pinpoint delivery callback for shortcodes (#2189) --- app/aws/mocks.py | 25 ++++++++++++++++++- app/celery/process_pinpoint_receipts_tasks.py | 8 +++--- .../test_process_pinpoint_receipts_tasks.py | 14 ++++++++--- 3 files changed, 40 insertions(+), 7 deletions(-) diff --git a/app/aws/mocks.py b/app/aws/mocks.py index 7a7943a0ac..99c7dad216 100644 --- a/app/aws/mocks.py +++ b/app/aws/mocks.py @@ -199,7 +199,7 @@ def pinpoint_successful_callback(reference=None, timestamp=1467074434, destinati "eventVersion": "1.0", "eventTimestamp": timestamp, "isFinal": False, - "originationPhoneNumber": "+18078061258", + "originationPhoneNumber": "+13655550100", "destinationPhoneNumber": destination, "isoCountryCode": "CA", "mcc": "302", @@ -245,6 +245,29 @@ def pinpoint_delivered_callback(reference=None, timestamp=1467074434, destinatio return _pinpoint_callback(body) +def 
pinpoint_shortcode_delivered_callback(reference=None, timestamp=1467074434, destination="+1XXX5550100"): + body = { + "eventType": "TEXT_SUCCESSFUL", + "eventVersion": "1.0", + "eventTimestamp": timestamp, + "isFinal": True, + "originationPhoneNumber": "555555", + "destinationPhoneNumber": destination, + "isoCountryCode": "CA", + "messageId": reference, + "messageRequestTimestamp": timestamp, + "messageEncoding": "GSM", + "messageType": "TRANSACTIONAL", + "messageStatus": "SUCCESSFUL", + "messageStatusDescription": "Message has been accepted by phone carrier", + "totalMessageParts": 1, + "totalMessagePrice": 0.02183, + "totalCarrierFee": 0.005, + } + + return _pinpoint_callback(body) + + # Note that 1467074434 = 2016-06-28 00:40:34.558 UTC def pinpoint_failed_callback(provider_response, reference=None, timestamp=1467074434, destination="+1XXX5550100"): body = { diff --git a/app/celery/process_pinpoint_receipts_tasks.py b/app/celery/process_pinpoint_receipts_tasks.py index 6192ee40cc..d5bbb4d1bc 100644 --- a/app/celery/process_pinpoint_receipts_tasks.py +++ b/app/celery/process_pinpoint_receipts_tasks.py @@ -51,8 +51,9 @@ def process_pinpoint_results(self, response): reference = receipt["messageId"] status = receipt["messageStatus"] provider_response = receipt["messageStatusDescription"] + isFinal = receipt["isFinal"] - notification_status = determine_pinpoint_status(status, provider_response) + notification_status = determine_pinpoint_status(status, provider_response, isFinal) if notification_status == NOTIFICATION_SENT: return # we don't want to update the status to sent if it's already sent @@ -116,18 +117,19 @@ def process_pinpoint_results(self, response): self.retry(queue=QueueNames.RETRY) -def determine_pinpoint_status(status: str, provider_response: str) -> Union[str, None]: +def determine_pinpoint_status(status: str, provider_response: str, isFinal: bool) -> Union[str, None]: """Determine the notification status based on the SMS status and provider response. 
Args: status (str): message status from AWS provider_response (str): detailed status from the SMS provider + isFinal (bool): whether this is the last update for this send Returns: Union[str, None]: the notification status or None if the status is not handled """ - if status == "DELIVERED": + if status == "DELIVERED" or status == "SUCCESSFUL" and isFinal: return NOTIFICATION_DELIVERED elif status == "SUCCESSFUL": # carrier has accepted the message but it hasn't gone to the phone yet return NOTIFICATION_SENT diff --git a/tests/app/celery/test_process_pinpoint_receipts_tasks.py b/tests/app/celery/test_process_pinpoint_receipts_tasks.py index 60b8096170..ea9bfc0654 100644 --- a/tests/app/celery/test_process_pinpoint_receipts_tasks.py +++ b/tests/app/celery/test_process_pinpoint_receipts_tasks.py @@ -7,6 +7,7 @@ from app.aws.mocks import ( pinpoint_delivered_callback, pinpoint_failed_callback, + pinpoint_shortcode_delivered_callback, pinpoint_successful_callback, ) from app.celery.process_pinpoint_receipts_tasks import process_pinpoint_results @@ -28,7 +29,14 @@ ) -def test_process_pinpoint_results_delivered(sample_template, notify_db, notify_db_session, mocker): +@pytest.mark.parametrize( + "callback, expected_response", + [ + (pinpoint_delivered_callback, "Message has been accepted by phone"), + (pinpoint_shortcode_delivered_callback, "Message has been accepted by phone carrier"), + ], +) +def test_process_pinpoint_results_delivered(sample_template, notify_db, notify_db_session, callback, expected_response, mocker): mock_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.info") mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") @@ -43,11 +51,11 @@ def test_process_pinpoint_results_delivered(sample_template, notify_db, notify_d ) assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT - process_pinpoint_results(pinpoint_delivered_callback(reference="ref")) + process_pinpoint_results(callback(reference="ref")) assert mock_callback_task.called_once_with(get_notification_by_id(notification.id)) assert get_notification_by_id(notification.id).status == NOTIFICATION_DELIVERED - assert get_notification_by_id(notification.id).provider_response == "Message has been accepted by phone" + assert get_notification_by_id(notification.id).provider_response == expected_response mock_logger.assert_called_once_with(f"Pinpoint callback return status of delivered for notification: {notification.id}") From 72eb7f748b7955029ab97c3c2a90f064e5e751da Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Tue, 4 Jun 2024 15:04:35 -0400 Subject: [PATCH 11/15] Fix fake callback (#2178) * add fake pinpoint callback * use correct callback generator * refactor fake pinpoint callback code --------- Co-authored-by: Jimmy Royer --- app/celery/research_mode_tasks.py | 33 ++++++++++++++++++-- tests/app/celery/test_research_mode_tasks.py | 26 +++++++++++++++ 2 files changed, 57 insertions(+), 2 deletions(-) diff --git a/app/celery/research_mode_tasks.py b/app/celery/research_mode_tasks.py index cae829f09c..cf1c013f8c 100644 --- a/app/celery/research_mode_tasks.py +++ b/app/celery/research_mode_tasks.py @@ -6,6 +6,8 @@ from app import create_uuid, notify_celery from app.aws.mocks import ( + pinpoint_delivered_callback, + pinpoint_failed_callback, ses_hard_bounce_callback, ses_notification_callback, ses_soft_bounce_callback, @@ -14,9 +16,11 @@ sns_success_callback, ) from app.aws.s3 import file_exists +from 
app.celery.process_pinpoint_receipts_tasks import process_pinpoint_results from app.celery.process_ses_receipts_tasks import process_ses_results from app.celery.process_sns_receipts_tasks import process_sns_results from app.config import QueueNames +from app.models import PINPOINT_PROVIDER, SNS_PROVIDER temp_fail = "+15149301633" perm_fail = "+15149301632" @@ -29,8 +33,14 @@ def send_sms_response(provider, to, reference=None): reference = reference or str(create_uuid()) - body = aws_sns_callback(reference, to) - process_sns_results.apply_async([body], queue=QueueNames.RESEARCH_MODE) + if provider == SNS_PROVIDER: + body = aws_sns_callback(reference, to) + process_sns_results.apply_async([body], queue=QueueNames.RESEARCH_MODE) + elif provider == PINPOINT_PROVIDER: + body = aws_pinpoint_callback(reference, to) + process_pinpoint_results.apply_async([body], queue=QueueNames.RESEARCH_MODE) + else: + raise ValueError("Provider {} not supported".format(provider)) return reference @@ -64,6 +74,25 @@ def aws_sns_callback(notification_id, to): return sns_success_callback(notification_id, destination=to, timestamp=timestamp) +def aws_pinpoint_callback(notification_id, to): + now = datetime.now() + timestamp = now.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + using_test_perm_fail_number = to.strip().endswith(perm_fail) + using_test_temp_fail_number = to.strip().endswith(temp_fail) + + if using_test_perm_fail_number or using_test_temp_fail_number: + return pinpoint_failed_callback( + "Phone is currently unreachable/unavailable" + if using_test_perm_fail_number + else "Phone carrier is currently unreachable/unavailable", + notification_id, + destination=to, + timestamp=timestamp, + ) + else: + return pinpoint_delivered_callback(notification_id, destination=to, timestamp=timestamp) + + @notify_celery.task( bind=True, name="create-fake-letter-response-file", diff --git a/tests/app/celery/test_research_mode_tasks.py b/tests/app/celery/test_research_mode_tasks.py index b5e02ab6d3..b667132f4b 100644 --- a/tests/app/celery/test_research_mode_tasks.py +++ b/tests/app/celery/test_research_mode_tasks.py @@ -8,6 +8,8 @@ from freezegun import freeze_time from app.aws.mocks import ( + pinpoint_delivered_callback, + pinpoint_failed_callback, ses_notification_callback, sns_failed_callback, sns_success_callback, @@ -50,6 +52,30 @@ def test_make_sns_success_callback(notify_api, mocker, phone_number, sns_callbac assert message_celery == sns_callback(**sns_callback_args) +@pytest.mark.parametrize( + "phone_number, pinpoint_callback, pinpoint_callback_args", + [ + ("+15149301630", pinpoint_delivered_callback, {}), + ("+15149301631", pinpoint_delivered_callback, {}), + ("+15149301632", pinpoint_failed_callback, {"provider_response": "Phone is currently unreachable/unavailable"}), + ("+15149301633", pinpoint_failed_callback, {"provider_response": "Phone carrier is currently unreachable/unavailable"}), + ], +) +@freeze_time("2018-01-25 14:00:30") +def test_make_pinpoint_success_callback(notify_api, mocker, phone_number, pinpoint_callback, pinpoint_callback_args): + mock_task = mocker.patch("app.celery.research_mode_tasks.process_pinpoint_results") + some_ref = str(uuid.uuid4()) + now = datetime.now() + timestamp = now.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + + send_sms_response("pinpoint", phone_number, some_ref) + + mock_task.apply_async.assert_called_once_with(ANY, queue=QueueNames.RESEARCH_MODE) + message_celery = mock_task.apply_async.call_args[0][0][0] + pinpoint_callback_args.update({"reference": some_ref, "destination": 
phone_number, "timestamp": timestamp}) + assert message_celery == pinpoint_callback(**pinpoint_callback_args) + + def test_make_ses_callback(notify_api, mocker): mock_task = mocker.patch("app.celery.research_mode_tasks.process_ses_results") some_ref = str(uuid.uuid4()) From 63b6e2d36eada28f1d9cafcc5c85560fff0a7e38 Mon Sep 17 00:00:00 2001 From: Steve Astels Date: Wed, 5 Jun 2024 14:32:03 -0400 Subject: [PATCH 12/15] use empty string for default pool id value (#2191) --- app/config.py | 4 ++-- app/delivery/send_to_providers.py | 5 ++--- tests/app/delivery/test_send_to_providers.py | 6 +++--- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/app/config.py b/app/config.py index aab8422f27..b8b6521ad5 100644 --- a/app/config.py +++ b/app/config.py @@ -266,8 +266,8 @@ class Config(object): AWS_SES_ACCESS_KEY = os.getenv("AWS_SES_ACCESS_KEY") AWS_SES_SECRET_KEY = os.getenv("AWS_SES_SECRET_KEY") AWS_PINPOINT_REGION = os.getenv("AWS_PINPOINT_REGION", "us-west-2") - AWS_PINPOINT_SC_POOL_ID = os.getenv("AWS_PINPOINT_SC_POOL_ID", None) - AWS_PINPOINT_DEFAULT_POOL_ID = os.getenv("AWS_PINPOINT_DEFAULT_POOL_ID", None) + AWS_PINPOINT_SC_POOL_ID = os.getenv("AWS_PINPOINT_SC_POOL_ID", "") + AWS_PINPOINT_DEFAULT_POOL_ID = os.getenv("AWS_PINPOINT_DEFAULT_POOL_ID", "") AWS_PINPOINT_CONFIGURATION_SET_NAME = os.getenv("AWS_PINPOINT_CONFIGURATION_SET_NAME", "pinpoint-configuration") AWS_PINPOINT_SC_TEMPLATE_IDS = env.list("AWS_PINPOINT_SC_TEMPLATE_IDS", []) AWS_US_TOLL_FREE_NUMBER = os.getenv("AWS_US_TOLL_FREE_NUMBER") diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index 5f74f76033..5ef24c0769 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -379,10 +379,9 @@ def provider_to_use( do_not_use_pinpoint = ( has_dedicated_number or sending_to_us_number - or current_app.config["AWS_PINPOINT_SC_POOL_ID"] is None - or (current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"] is None and not using_sc_pool_template) + or not current_app.config["AWS_PINPOINT_SC_POOL_ID"] + or ((not current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"]) and not using_sc_pool_template) ) - if do_not_use_pinpoint: active_providers_in_order = [ p diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 6b7dbf2210..0768d98cc9 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -68,7 +68,7 @@ def test_should_use_sns_for_sms_by_default_if_partially_configured(self, restore notify_api, { "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", - "AWS_PINPOINT_DEFAULT_POOL_ID": None, + "AWS_PINPOINT_DEFAULT_POOL_ID": "", "AWS_PINPOINT_SC_TEMPLATE_IDS": [], }, ): @@ -81,7 +81,7 @@ def test_should_use_pinpoint_for_sms_for_sc_template_if_sc_pool_configured(self, notify_api, { "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", - "AWS_PINPOINT_DEFAULT_POOL_ID": None, + "AWS_PINPOINT_DEFAULT_POOL_ID": "", "AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sc_template)], }, ): @@ -110,7 +110,7 @@ def test_should_use_sns_for_sms_if_sending_to_the_US(self, restore_provider_deta provider = send_to_providers.provider_to_use("sms", "1234", "+17065551234") assert provider.name == "sns" - @pytest.mark.parametrize("sc_pool_id, default_pool_id", [(None, "default_pool_id"), ("sc_pool_id", None)]) + @pytest.mark.parametrize("sc_pool_id, default_pool_id", [("", "default_pool_id"), ("sc_pool_id", "")]) def test_should_use_sns_if_pinpoint_not_configured(self, restore_provider_details, notify_api, sc_pool_id, 
default_pool_id): with set_config_values( notify_api, From 768fbe9478f3f652cafb06e24d69ec445578da21 Mon Sep 17 00:00:00 2001 From: "sre-read-write[bot]" <92993749+sre-read-write[bot]@users.noreply.github.com> Date: Mon, 10 Jun 2024 09:25:25 -0400 Subject: [PATCH 13/15] chore: synced local '.github/workflows/ossf-scorecard.yml' with remote 'tools/sre_file_sync/ossf-scorecard.yml' (#2188) Co-authored-by: sre-read-write[bot] <92993749+sre-read-write[bot]@users.noreply.github.com> --- .github/workflows/ossf-scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml index 437124670c..dd97d6ea44 100644 --- a/.github/workflows/ossf-scorecard.yml +++ b/.github/workflows/ossf-scorecard.yml @@ -25,7 +25,7 @@ jobs: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@c64f0a7231aa68a6849c2b65bf16af3daa23d3e6 + uses: ossf/scorecard-action@7699f539c2b9ff754039f0e173fdf1a4e4a1e143 with: results_file: ossf-results.json results_format: json From f6ed3af403519a15de6041e513dec13af9d247fc Mon Sep 17 00:00:00 2001 From: Jumana B Date: Mon, 10 Jun 2024 12:32:59 -0400 Subject: [PATCH 14/15] Task/remove letters (#2185) * Remove letter code * remove more letter code --- app/celery/letters_pdf_tasks.py | 361 +------ app/dao/notifications_dao.py | 40 - app/letters/rest.py | 8 +- app/letters/utils.py | 188 +--- app/service/rest.py | 56 +- tests/app/celery/test_letters_pdf_tasks.py | 923 ------------------ ...t_notification_dao_delete_notifications.py | 28 +- tests/app/letters/__init__.py | 0 tests/app/letters/test_letter_utils.py | 402 -------- tests/app/letters/test_returned_letters.py | 27 - tests/app/service/test_rest.py | 359 ------- .../test_send_pdf_letter_notification.py | 111 --- 12 files changed, 38 insertions(+), 2465 deletions(-) delete mode 100644 tests/app/celery/test_letters_pdf_tasks.py delete mode 100644 tests/app/letters/__init__.py delete mode 100644 tests/app/letters/test_letter_utils.py delete mode 100644 tests/app/letters/test_returned_letters.py delete mode 100644 tests/app/service/test_send_pdf_letter_notification.py diff --git a/app/celery/letters_pdf_tasks.py b/app/celery/letters_pdf_tasks.py index 059f0fc940..e40a10b366 100644 --- a/app/celery/letters_pdf_tasks.py +++ b/app/celery/letters_pdf_tasks.py @@ -1,392 +1,51 @@ -import base64 -import math -from base64 import urlsafe_b64encode -from datetime import datetime -from hashlib import sha512 -from json import JSONDecodeError -from uuid import UUID - -from botocore.exceptions import ClientError as BotoClientError -from flask import current_app -from notifications_utils.s3 import s3upload from notifications_utils.statsd_decorators import statsd -from PyPDF2.utils import PdfReadError -from requests import RequestException -from requests import post as requests_post from app import notify_celery -from app.aws import s3 -from app.config import QueueNames, TaskNames from app.cronitor import cronitor -from app.dao.notifications_dao import ( - dao_get_notification_by_reference, - dao_get_notifications_by_references, - dao_update_notification, - dao_update_notifications_by_reference, - get_notification_by_id, - update_notification_status_by_id, -) -from app.errors import VirusScanError -from app.letters.utils import ( - ScanErrorType, - copy_redaction_failed_pdf, - get_file_names_from_error_bucket, - get_folder_name, - get_page_count, - get_reference_from_filename, - move_error_pdf_to_scan_bucket, - move_failed_pdf, - 
move_scan_to_invalid_pdf_bucket, - upload_letter_pdf, -) -from app.models import ( - KEY_TYPE_TEST, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_VALIDATION_FAILED, - NOTIFICATION_VIRUS_SCAN_FAILED, -) -from celery.exceptions import MaxRetriesExceededError @notify_celery.task(bind=True, name="create-letters-pdf", max_retries=15, default_retry_delay=300) @statsd(namespace="tasks") def create_letters_pdf(self, notification_id): - try: - notification = get_notification_by_id(notification_id, _raise=True) - pdf_data, billable_units = get_letters_pdf( - notification.template, - contact_block=notification.reply_to_text, - filename=notification.service.letter_branding and notification.service.letter_branding.filename, - values=notification.personalisation, - ) - - upload_letter_pdf(notification, pdf_data) - - if notification.key_type != KEY_TYPE_TEST: - notification.billable_units = billable_units - dao_update_notification(notification) - - current_app.logger.info( - "Letter notification reference {reference}: billable units set to {billable_units}".format( - reference=str(notification.reference), billable_units=billable_units - ) - ) - - except (RequestException, BotoClientError): - try: - current_app.logger.exception("Letters PDF notification creation for id: {} failed".format(notification_id)) - self.retry(queue=QueueNames.RETRY) - except MaxRetriesExceededError: - current_app.logger.error( - "RETRY FAILED: task create_letters_pdf failed for notification {}".format(notification_id), - ) - update_notification_status_by_id(notification_id, "technical-failure") + pass def get_letters_pdf(template, contact_block, filename, values): - template_for_letter_print = { - "subject": template.subject, - "content": template.content, - } - - data = { - "letter_contact_block": contact_block, - "template": template_for_letter_print, - "values": values, - "filename": filename, - } - resp = requests_post( - "{}/print.pdf".format(current_app.config["TEMPLATE_PREVIEW_API_HOST"]), - json=data, - headers={"Authorization": "Token {}".format(current_app.config["TEMPLATE_PREVIEW_API_KEY"])}, - ) - resp.raise_for_status() - - pages_per_sheet = 2 - billable_units = math.ceil(int(resp.headers.get("X-pdf-page-count", 0)) / pages_per_sheet) - - return resp.content, billable_units + pass @notify_celery.task(name="collate-letter-pdfs-for-day") @cronitor("collate-letter-pdfs-for-day") def collate_letter_pdfs_for_day(date=None): - if not date: - # Using the truncated date is ok because UTC to BST does not make a difference to the date, - # since it is triggered mid afternoon. 
- date = datetime.utcnow().strftime("%Y-%m-%d") - - letter_pdfs = sorted( - s3.get_s3_bucket_objects(current_app.config["LETTERS_PDF_BUCKET_NAME"], subfolder=date), - key=lambda letter: letter["Key"], - ) - for i, letters in enumerate(group_letters(letter_pdfs)): - filenames = [letter["Key"] for letter in letters] - - hash = urlsafe_b64encode(sha512("".join(filenames).encode()).digest())[:20].decode() - # eg NOTIFY.2018-12-31.001.Wjrui5nAvObjPd-3GEL-.ZIP - dvla_filename = "NOTIFY.{date}.{num:03}.{hash}.ZIP".format(date=date, num=i + 1, hash=hash) - - current_app.logger.info( - "Calling task zip-and-send-letter-pdfs for {} pdfs to upload {} with total size {:,} bytes".format( - len(filenames), dvla_filename, sum(letter["Size"] for letter in letters) - ) - ) - notify_celery.send_task( - name=TaskNames.ZIP_AND_SEND_LETTER_PDFS, - kwargs={"filenames_to_zip": filenames, "upload_filename": dvla_filename}, - queue=QueueNames.PROCESS_FTP, - compression="zlib", - ) + pass def group_letters(letter_pdfs): - """ - Group letters in chunks of MAX_LETTER_PDF_ZIP_FILESIZE. Will add files to lists, never going over that size. - If a single file is (somehow) larger than MAX_LETTER_PDF_ZIP_FILESIZE that'll be in a list on it's own. - If there are no files, will just exit (rather than yielding an empty list). - """ - running_filesize = 0 - list_of_files = [] - for letter in letter_pdfs: - if letter["Key"].lower().endswith(".pdf") and letter_in_created_state(letter["Key"]): - if ( - running_filesize + letter["Size"] > current_app.config["MAX_LETTER_PDF_ZIP_FILESIZE"] - or len(list_of_files) >= current_app.config["MAX_LETTER_PDF_COUNT_PER_ZIP"] - ): - yield list_of_files - running_filesize = 0 - list_of_files = [] - - running_filesize += letter["Size"] - list_of_files.append(letter) - - if list_of_files: - yield list_of_files + pass def letter_in_created_state(filename): - # filename looks like '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF' - subfolder = filename.split("/")[0] - ref = get_reference_from_filename(filename) - notifications = dao_get_notifications_by_references([ref]) - if notifications: - if notifications[0].status == NOTIFICATION_CREATED: - return True - current_app.logger.info( - "Collating letters for {} but notification with reference {} already in {}".format( - subfolder, ref, notifications[0].status - ) - ) - return False + pass @notify_celery.task(bind=True, name="process-virus-scan-passed", max_retries=15, default_retry_delay=300) def process_virus_scan_passed(self, filename): - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - current_app.logger.info("notification id {} Virus scan passed: {}".format(notification.id, filename)) - - is_test_key = notification.key_type == KEY_TYPE_TEST - - scan_pdf_object = s3.get_s3_object(current_app.config["LETTERS_SCAN_BUCKET_NAME"], filename) - old_pdf = scan_pdf_object.get()["Body"].read() - - try: - billable_units = get_page_count(old_pdf) - except PdfReadError: - current_app.logger.exception(msg="Invalid PDF received for notification_id: {}".format(notification.id)) - _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object) - return - - sanitise_response = _sanitise_precompiled_pdf(self, notification, old_pdf) - if not sanitise_response: - new_pdf = None - else: - sanitise_response = sanitise_response.json() - try: - new_pdf = base64.b64decode(sanitise_response["file"].encode()) - except JSONDecodeError: - new_pdf = sanitise_response.content - - 
redaction_failed_message = sanitise_response.get("redaction_failed_message") - if redaction_failed_message and not is_test_key: - current_app.logger.info("{} for notification id {} ({})".format(redaction_failed_message, notification.id, filename)) - copy_redaction_failed_pdf(filename) - - # TODO: Remove this once CYSP update their template to not cross over the margins - if notification.service_id == UUID("fe44178f-3b45-4625-9f85-2264a36dd9ec"): # CYSP - # Check your state pension submit letters with good addresses and notify tags, so just use their supplied pdf - new_pdf = old_pdf - - if not new_pdf: - current_app.logger.info("Invalid precompiled pdf received {} ({})".format(notification.id, filename)) - _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object) - return - else: - current_app.logger.info("Validation was successful for precompiled pdf {} ({})".format(notification.id, filename)) - - current_app.logger.info("notification id {} ({}) sanitised and ready to send".format(notification.id, filename)) - - try: - _upload_pdf_to_test_or_live_pdf_bucket(new_pdf, filename, is_test_letter=is_test_key) - - update_letter_pdf_status( - reference=reference, - status=NOTIFICATION_DELIVERED if is_test_key else NOTIFICATION_CREATED, - billable_units=billable_units, - ) - scan_pdf_object.delete() - except BotoClientError: - current_app.logger.exception("Error uploading letter to live pdf bucket for notification: {}".format(notification.id)) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - - -def _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object): - try: - move_scan_to_invalid_pdf_bucket(filename) - scan_pdf_object.delete() - - update_letter_pdf_status( - reference=notification.reference, - status=NOTIFICATION_VALIDATION_FAILED, - billable_units=0, - ) - except BotoClientError: - current_app.logger.exception("Error when moving letter with id {} to invalid PDF bucket".format(notification.id)) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - - -def _upload_pdf_to_test_or_live_pdf_bucket(pdf_data, filename, is_test_letter): - target_bucket_config = "TEST_LETTERS_BUCKET_NAME" if is_test_letter else "LETTERS_PDF_BUCKET_NAME" - target_bucket_name = current_app.config[target_bucket_config] - target_filename = get_folder_name(datetime.utcnow(), is_test_letter) + filename - - s3upload( - filedata=pdf_data, - region=current_app.config["AWS_REGION"], - bucket_name=target_bucket_name, - file_location=target_filename, - ) - - -def _sanitise_precompiled_pdf(self, notification, precompiled_pdf): - try: - response = requests_post( - "{}/precompiled/sanitise".format(current_app.config["TEMPLATE_PREVIEW_API_HOST"]), - data=precompiled_pdf, - headers={ - "Authorization": "Token {}".format(current_app.config["TEMPLATE_PREVIEW_API_KEY"]), - "Service-ID": str(notification.service_id), - "Notification-ID": str(notification.id), - }, - ) - response.raise_for_status() - return response - except RequestException as ex: - if ex.response is not None and ex.response.status_code == 400: - message = "sanitise_precompiled_pdf validation error for notification: {}. 
".format(notification.id) - if "message" in response.json(): - message += response.json()["message"] - - current_app.logger.info(message) - return None - - try: - current_app.logger.exception("sanitise_precompiled_pdf failed for notification: {}".format(notification.id)) - self.retry(queue=QueueNames.RETRY) - except MaxRetriesExceededError: - current_app.logger.error( - "RETRY FAILED: sanitise_precompiled_pdf failed for notification {}".format(notification.id), - ) - - notification.status = NOTIFICATION_TECHNICAL_FAILURE - dao_update_notification(notification) - raise + pass @notify_celery.task(name="process-virus-scan-failed") def process_virus_scan_failed(filename): - move_failed_pdf(filename, ScanErrorType.FAILURE) - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - updated_count = update_letter_pdf_status(reference, NOTIFICATION_VIRUS_SCAN_FAILED, billable_units=0) - - if updated_count != 1: - raise Exception( - "There should only be one letter notification for each reference. Found {} notifications".format(updated_count) - ) - - error = VirusScanError("notification id {} Virus scan failed: {}".format(notification.id, filename)) - current_app.logger.exception(error) - raise error + pass @notify_celery.task(name="process-virus-scan-error") def process_virus_scan_error(filename): - move_failed_pdf(filename, ScanErrorType.ERROR) - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - updated_count = update_letter_pdf_status(reference, NOTIFICATION_TECHNICAL_FAILURE, billable_units=0) - - if updated_count != 1: - raise Exception( - "There should only be one letter notification for each reference. Found {} notifications".format(updated_count) - ) - error = VirusScanError("notification id {} Virus scan error: {}".format(notification.id, filename)) - current_app.logger.exception(error) - raise error + pass def update_letter_pdf_status(reference, status, billable_units): - return dao_update_notifications_by_reference( - references=[reference], - update_dict={ - "status": status, - "billable_units": billable_units, - "updated_at": datetime.utcnow(), - }, - )[0] + pass def replay_letters_in_error(filename=None): - # This method can be used to replay letters that end up in the ERROR directory. - # We had an incident where clamAV was not processing the virus scan. 
- if filename: - move_error_pdf_to_scan_bucket(filename) - # call task to add the filename to anti virus queue - current_app.logger.info("Calling scan_file for: {}".format(filename)) - - if current_app.config["ANTIVIRUS_ENABLED"]: - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={"filename": filename}, - queue=QueueNames.ANTIVIRUS, - ) - else: - # stub out antivirus in dev - process_virus_scan_passed.apply_async( - kwargs={"filename": filename}, - queue=QueueNames.LETTERS, - ) - else: - error_files = get_file_names_from_error_bucket() - for item in error_files: - moved_file_name = item.key.split("/")[1] - current_app.logger.info("Calling scan_file for: {}".format(moved_file_name)) - move_error_pdf_to_scan_bucket(moved_file_name) - # call task to add the filename to anti virus queue - if current_app.config["ANTIVIRUS_ENABLED"]: - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={"filename": moved_file_name}, - queue=QueueNames.ANTIVIRUS, - ) - else: - # stub out antivirus in dev - process_virus_scan_passed.apply_async( - kwargs={"filename": moved_file_name}, - queue=QueueNames.LETTERS, - ) + pass diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 9e20bca476..e88df7cbee 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -2,7 +2,6 @@ import string from datetime import datetime, timedelta -from boto.exception import BotoClientError from flask import current_app from itsdangerous import BadSignature from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES @@ -25,11 +24,9 @@ from werkzeug.datastructures import MultiDict from app import create_uuid, db, signer_personalisation -from app.aws.s3 import get_s3_bucket_objects, remove_s3_object from app.dao.dao_utils import transactional from app.dao.date_util import utc_midnight_n_days_ago from app.errors import InvalidRequest -from app.letters.utils import LETTERS_PDF_FILE_LOCATION_STRUCTURE from app.models import ( EMAIL_TYPE, KEY_TYPE_TEST, @@ -391,9 +388,6 @@ def delete_notifications_older_than_retention_by_type(notification_type, qry_lim convert_utc_to_local_timezone(datetime.utcnow()).date() ) - timedelta(days=f.days_of_retention) - if notification_type == LETTER_TYPE: - _delete_letters_from_s3(notification_type, f.service_id, days_of_retention, qry_limit) - insert_update_notification_history(notification_type, days_of_retention, f.service_id) current_app.logger.info("Deleting {} notifications for service id: {}".format(notification_type, f.service_id)) @@ -409,8 +403,6 @@ def delete_notifications_older_than_retention_by_type(notification_type, qry_lim for row in service_ids_to_purge: service_id = row._mapping["id"] - if notification_type == LETTER_TYPE: - _delete_letters_from_s3(notification_type, service_id, seven_days_ago, qry_limit) insert_update_notification_history(notification_type, seven_days_ago, service_id) deleted += _delete_notifications(notification_type, seven_days_ago, service_id, qry_limit) @@ -486,38 +478,6 @@ def insert_update_notification_history(notification_type, date_to_delete_from, s db.session.commit() -def _delete_letters_from_s3(notification_type, service_id, date_to_delete_from, query_limit): - letters_to_delete_from_s3 = ( - db.session.query(Notification) - .filter( - Notification.notification_type == notification_type, - Notification.created_at < date_to_delete_from, - Notification.service_id == service_id, - ) - .limit(query_limit) - .all() - ) - for letter in letters_to_delete_from_s3: - bucket_name = 
current_app.config["LETTERS_PDF_BUCKET_NAME"] - if letter.sent_at: - sent_at = str(letter.sent_at.date()) - prefix = LETTERS_PDF_FILE_LOCATION_STRUCTURE.format( - folder=sent_at + "/", - reference=letter.reference, - duplex="D", - letter_class="2", - colour="C", - crown="C" if letter.service.crown else "N", - date="", - ).upper()[:-5] - s3_objects = get_s3_bucket_objects(bucket_name=bucket_name, subfolder=prefix) - for s3_object in s3_objects: - try: - remove_s3_object(bucket_name, s3_object["Key"]) - except BotoClientError: - current_app.logger.exception("Could not delete S3 object with filename: {}".format(s3_object["Key"])) - - @statsd(namespace="dao") @transactional def dao_delete_notifications_by_id(notification_id): diff --git a/app/letters/rest.py b/app/letters/rest.py index 21c80f2432..87684e0b66 100644 --- a/app/letters/rest.py +++ b/app/letters/rest.py @@ -1,7 +1,5 @@ -from flask import Blueprint, jsonify, request +from flask import Blueprint -from app.letters.letter_schemas import letter_references -from app.schema_validation import validate from app.v2.errors import register_errors letter_job = Blueprint("letter-job", __name__) @@ -10,6 +8,4 @@ @letter_job.route("/letters/returned", methods=["POST"]) def create_process_returned_letters_job(): - references = validate(request.get_json(), letter_references) - - return jsonify(references=references["references"]), 200 + pass diff --git a/app/letters/utils.py b/app/letters/utils.py index 6369b22040..8d5bcab489 100644 --- a/app/letters/utils.py +++ b/app/letters/utils.py @@ -1,21 +1,6 @@ -import io -import math -from datetime import datetime, timedelta from enum import Enum -import boto3 -from flask import current_app -from notifications_utils.letter_timings import LETTER_PROCESSING_DEADLINE -from notifications_utils.pdf import pdf_page_count -from notifications_utils.s3 import s3upload -from notifications_utils.timezones import convert_utc_to_local_timezone - -from app.models import ( - KEY_TYPE_TEST, - NOTIFICATION_VALIDATION_FAILED, - RESOLVE_POSTAGE_FOR_FILE_NAME, - SECOND_CLASS, -) +from app.models import SECOND_CLASS class ScanErrorType(Enum): @@ -29,203 +14,64 @@ class ScanErrorType(Enum): def get_folder_name(_now, is_test_or_scan_letter=False): - if is_test_or_scan_letter: - folder_name = "" - else: - print_datetime = convert_utc_to_local_timezone(_now) - if print_datetime.time() > LETTER_PROCESSING_DEADLINE: - print_datetime += timedelta(days=1) - folder_name = "{}/".format(print_datetime.date()) - return folder_name + pass def get_letter_pdf_filename(reference, crown, is_scan_letter=False, postage=SECOND_CLASS): - now = datetime.utcnow() - - upload_file_name = LETTERS_PDF_FILE_LOCATION_STRUCTURE.format( - folder=get_folder_name(now, is_scan_letter), - reference=reference, - duplex="D", - letter_class=RESOLVE_POSTAGE_FOR_FILE_NAME[postage], - colour="C", - crown="C" if crown else "N", - date=now.strftime("%Y%m%d%H%M%S"), - ).upper() - - return upload_file_name + pass def get_bucket_name_and_prefix_for_notification(notification): - folder = "" - if notification.status == NOTIFICATION_VALIDATION_FAILED: - bucket_name = current_app.config["INVALID_PDF_BUCKET_NAME"] - elif notification.key_type == KEY_TYPE_TEST: - bucket_name = current_app.config["TEST_LETTERS_BUCKET_NAME"] - else: - bucket_name = current_app.config["LETTERS_PDF_BUCKET_NAME"] - if notification.sent_at: - folder = "{}/".format(notification.sent_at.date()) - elif notification.updated_at: - folder = get_folder_name(notification.updated_at, False) - else: - folder = 
get_folder_name(notification.created_at, False) - - upload_file_name = PRECOMPILED_BUCKET_PREFIX.format(folder=folder, reference=notification.reference).upper() - - return bucket_name, upload_file_name + pass def get_reference_from_filename(filename): - # filename looks like '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF' - filename_parts = filename.split(".") - return filename_parts[1] + pass def upload_letter_pdf(notification, pdf_data, precompiled=False): - current_app.logger.info( - "PDF Letter {} reference {} created at {}, {} bytes".format( - notification.id, - notification.reference, - notification.created_at, - len(pdf_data), - ) - ) - - upload_file_name = get_letter_pdf_filename( - notification.reference, - notification.service.crown, - is_scan_letter=precompiled or notification.key_type == KEY_TYPE_TEST, - postage=notification.postage, - ) - - if precompiled: - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - elif notification.key_type == KEY_TYPE_TEST: - bucket_name = current_app.config["TEST_LETTERS_BUCKET_NAME"] - else: - bucket_name = current_app.config["LETTERS_PDF_BUCKET_NAME"] - - s3upload( - filedata=pdf_data, - region=current_app.config["AWS_REGION"], - bucket_name=bucket_name, - file_location=upload_file_name, - ) - - current_app.logger.info( - "Uploaded letters PDF {} to {} for notification id {}".format(upload_file_name, bucket_name, notification.id) - ) - return upload_file_name + pass def move_failed_pdf(source_filename, scan_error_type): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - target_filename = ("ERROR/" if scan_error_type == ScanErrorType.ERROR else "FAILURE/") + source_filename - - _move_s3_object(scan_bucket, source_filename, scan_bucket, target_filename) + pass def copy_redaction_failed_pdf(source_filename): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - target_filename = "REDACTION_FAILURE/" + source_filename - - _copy_s3_object(scan_bucket, source_filename, scan_bucket, target_filename) + pass def move_error_pdf_to_scan_bucket(source_filename): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - error_file = "ERROR/" + source_filename - - _move_s3_object(scan_bucket, error_file, scan_bucket, source_filename) + pass def move_scan_to_invalid_pdf_bucket(source_filename): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - invalid_pdf_bucket = current_app.config["INVALID_PDF_BUCKET_NAME"] - _move_s3_object(scan_bucket, source_filename, invalid_pdf_bucket, source_filename) + pass def move_uploaded_pdf_to_letters_bucket(source_filename, upload_filename): - _move_s3_object( - source_bucket=current_app.config["TRANSIENT_UPLOADED_LETTERS"], - source_filename=source_filename, - target_bucket=current_app.config["LETTERS_PDF_BUCKET_NAME"], - target_filename=upload_filename, - ) + pass def get_file_names_from_error_bucket(): - s3 = boto3.resource("s3") - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - bucket = s3.Bucket(scan_bucket) - - return bucket.objects.filter(Prefix="ERROR") + pass def get_letter_pdf(notification): - bucket_name, prefix = get_bucket_name_and_prefix_for_notification(notification) - - s3 = boto3.resource("s3") - bucket = s3.Bucket(bucket_name) - item = next(x for x in bucket.objects.filter(Prefix=prefix)) - - obj = s3.Object(bucket_name=bucket_name, key=item.key) - return obj.get()["Body"].read() + pass def _move_s3_object(source_bucket, source_filename, target_bucket, target_filename): - s3 = boto3.resource("s3") - copy_source = 
{"Bucket": source_bucket, "Key": source_filename} - - target_bucket = s3.Bucket(target_bucket) - obj = target_bucket.Object(target_filename) - - # Tags are copied across but the expiration time is reset in the destination bucket - # e.g. if a file has 5 days left to expire on a ONE_WEEK retention in the source bucket, - # in the destination bucket the expiration time will be reset to 7 days left to expire - obj.copy(copy_source, ExtraArgs={"ServerSideEncryption": "AES256"}) - - s3.Object(source_bucket, source_filename).delete() - - current_app.logger.info( - "Moved letter PDF: {}/{} to {}/{}".format(source_bucket, source_filename, target_bucket, target_filename) - ) + pass def _copy_s3_object(source_bucket, source_filename, target_bucket, target_filename): - s3 = boto3.resource("s3") - copy_source = {"Bucket": source_bucket, "Key": source_filename} - - target_bucket = s3.Bucket(target_bucket) - obj = target_bucket.Object(target_filename) - - # Tags are copied across but the expiration time is reset in the destination bucket - # e.g. if a file has 5 days left to expire on a ONE_WEEK retention in the source bucket, - # in the destination bucket the expiration time will be reset to 7 days left to expire - obj.copy(copy_source, ExtraArgs={"ServerSideEncryption": "AES256"}) - - current_app.logger.info( - "Copied letter PDF: {}/{} to {}/{}".format(source_bucket, source_filename, target_bucket, target_filename) - ) + pass def letter_print_day(created_at): - bst_print_datetime = convert_utc_to_local_timezone(created_at) + timedelta(hours=6, minutes=30) - bst_print_date = bst_print_datetime.date() - - current_bst_date = convert_utc_to_local_timezone(datetime.utcnow()).date() - - if bst_print_date >= current_bst_date: - return "today" - else: - print_date = bst_print_datetime.strftime("%d %B").lstrip("0") - return "on {}".format(print_date) + pass def get_page_count(pdf): - pages = pdf_page_count(io.BytesIO(pdf)) - pages_per_sheet = 2 - billable_units = math.ceil(pages / pages_per_sheet) - return billable_units + pass diff --git a/app/service/rest.py b/app/service/rest.py index 8ecf13f47d..8ba79097e3 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -11,7 +11,6 @@ over_email_daily_limit_cache_key, over_sms_daily_limit_cache_key, ) -from notifications_utils.letter_timings import letter_can_be_cancelled from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy import func from sqlalchemy.exc import IntegrityError @@ -53,13 +52,6 @@ dao_get_reply_to_by_service_id, update_reply_to_email_address, ) -from app.dao.service_letter_contact_dao import ( - add_letter_contact_for_service, - archive_letter_contact, - dao_get_letter_contact_by_id, - dao_get_letter_contacts_by_service_id, - update_letter_contact, -) from app.dao.service_safelist_dao import ( dao_add_and_commit_safelisted_contacts, dao_fetch_service_safelist, @@ -93,7 +85,6 @@ from app.dao.templates_dao import dao_get_template_by_id from app.dao.users_dao import get_user_by_id from app.errors import InvalidRequest, register_errors -from app.letters.utils import letter_print_day from app.models import ( KEY_TYPE_NORMAL, LETTER_TYPE, @@ -117,10 +108,7 @@ service_schema, ) from app.service import statistics -from app.service.send_notification import ( - send_one_off_notification, - send_pdf_letter_notification, -) +from app.service.send_notification import send_one_off_notification from app.service.sender import send_notification_to_service_users from app.service.service_data_retention_schema import ( 
add_service_data_retention_request, @@ -128,7 +116,6 @@ ) from app.service.service_senders_schema import ( add_service_email_reply_to_request, - add_service_letter_contact_block_request, add_service_sms_sender_request, ) from app.service.utils import ( @@ -575,13 +562,6 @@ def cancel_notification_for_service(service_id, notification_id): "Notification cannot be cancelled - only letters can be cancelled", status_code=400, ) - elif not letter_can_be_cancelled(notification.status, notification.created_at): - print_day = letter_print_day(notification.created_at) - - raise InvalidRequest( - "It’s too late to cancel this letter. Printing started {} at 5.30pm".format(print_day), - status_code=400, - ) updated_notification = notifications_dao.update_notification_status_by_id( notification_id, @@ -793,8 +773,7 @@ def create_one_off_notification(service_id): @service_blueprint.route("//send-pdf-letter", methods=["POST"]) def create_pdf_letter(service_id): - resp = send_pdf_letter_notification(service_id, request.get_json()) - return jsonify(resp), 201 + pass @service_blueprint.route("//email-reply-to", methods=["GET"]) @@ -872,41 +851,22 @@ def delete_service_reply_to_email_address(service_id, reply_to_email_id): @service_blueprint.route("//letter-contact", methods=["GET"]) def get_letter_contacts(service_id): - result = dao_get_letter_contacts_by_service_id(service_id) - return jsonify([i.serialize() for i in result]), 200 + pass @service_blueprint.route("//letter-contact/", methods=["GET"]) def get_letter_contact_by_id(service_id, letter_contact_id): - result = dao_get_letter_contact_by_id(service_id=service_id, letter_contact_id=letter_contact_id) - return jsonify(result.serialize()), 200 + pass @service_blueprint.route("//letter-contact", methods=["POST"]) def add_service_letter_contact(service_id): - # validate the service exists, throws ResultNotFound exception. - dao_fetch_service_by_id(service_id) - form = validate(request.get_json(), add_service_letter_contact_block_request) - new_letter_contact = add_letter_contact_for_service( - service_id=service_id, - contact_block=form["contact_block"], - is_default=form.get("is_default", True), - ) - return jsonify(data=new_letter_contact.serialize()), 201 + pass @service_blueprint.route("//letter-contact/", methods=["POST"]) def update_service_letter_contact(service_id, letter_contact_id): - # validate the service exists, throws ResultNotFound exception. 
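The deleted letter-contact endpoints all follow the same validate-then-persist shape. A minimal sketch of that shape, assuming a hypothetical in-memory LetterContact in place of the real schema validation and DAO helpers:

from dataclasses import dataclass, field
from uuid import UUID, uuid4

from flask import Blueprint, jsonify, request

letter_contact_sketch = Blueprint("letter_contact_sketch", __name__)


@dataclass
class LetterContact:
    service_id: UUID
    contact_block: str
    is_default: bool
    id: UUID = field(default_factory=uuid4)

    def serialize(self):
        return {
            "id": str(self.id),
            "service_id": str(self.service_id),
            "contact_block": self.contact_block,
            "is_default": self.is_default,
        }


@letter_contact_sketch.route("/service/<uuid:service_id>/letter-contact", methods=["POST"])
def add_letter_contact(service_id):
    # The real handler first checked the service exists, then validated the payload
    # against add_service_letter_contact_block_request before calling the DAO.
    form = request.get_json()
    contact = LetterContact(service_id, form["contact_block"], form.get("is_default", True))
    return jsonify(data=contact.serialize()), 201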
- dao_fetch_service_by_id(service_id) - form = validate(request.get_json(), add_service_letter_contact_block_request) - new_reply_to = update_letter_contact( - service_id=service_id, - letter_contact_id=letter_contact_id, - contact_block=form["contact_block"], - is_default=form.get("is_default", True), - ) - return jsonify(data=new_reply_to.serialize()), 200 + pass @service_blueprint.route( @@ -914,9 +874,7 @@ def update_service_letter_contact(service_id, letter_contact_id): methods=["POST"], ) def delete_service_letter_contact(service_id, letter_contact_id): - archived_letter_contact = archive_letter_contact(service_id, letter_contact_id) - - return jsonify(data=archived_letter_contact.serialize()), 200 + pass @service_blueprint.route("//sms-sender", methods=["POST"]) diff --git a/tests/app/celery/test_letters_pdf_tasks.py b/tests/app/celery/test_letters_pdf_tasks.py deleted file mode 100644 index 34eb4589c8..0000000000 --- a/tests/app/celery/test_letters_pdf_tasks.py +++ /dev/null @@ -1,923 +0,0 @@ -import base64 -from unittest.mock import ANY, Mock, call - -import boto3 -import pytest -import requests_mock -from botocore.exceptions import ClientError -from flask import current_app -from freezegun import freeze_time -from moto import mock_s3 -from PyPDF2.utils import PdfReadError -from requests import RequestException -from sqlalchemy.orm.exc import NoResultFound - -from app.celery.letters_pdf_tasks import ( - _move_invalid_letter_and_update_status, - _sanitise_precompiled_pdf, - collate_letter_pdfs_for_day, - create_letters_pdf, - get_letters_pdf, - group_letters, - letter_in_created_state, - process_virus_scan_error, - process_virus_scan_failed, - process_virus_scan_passed, - replay_letters_in_error, -) -from app.errors import VirusScanError -from app.letters.utils import ScanErrorType -from app.models import ( - KEY_TYPE_NORMAL, - KEY_TYPE_TEST, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_PENDING_VIRUS_CHECK, - NOTIFICATION_SENDING, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_VALIDATION_FAILED, - NOTIFICATION_VIRUS_SCAN_FAILED, - Notification, -) -from celery.exceptions import MaxRetriesExceededError, Retry -from tests.app.db import create_letter_branding, create_notification, save_notification -from tests.conftest import set_config_values - - -@pytest.mark.skip(reason="Letter tests") -def test_should_have_decorated_tasks_functions(): - assert create_letters_pdf.__wrapped__.__name__ == "create_letters_pdf" - assert collate_letter_pdfs_for_day.__wrapped__.__name__ == "collate_letter_pdfs_for_day" - assert process_virus_scan_passed.__wrapped__.__name__ == "process_virus_scan_passed" - assert process_virus_scan_failed.__wrapped__.__name__ == "process_virus_scan_failed" - assert process_virus_scan_error.__wrapped__.__name__ == "process_virus_scan_error" - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize("personalisation", [{"name": "test"}, None]) -def test_get_letters_pdf_calls_notifications_template_preview_service_correctly( - notify_api, mocker, client, sample_letter_template, personalisation -): - contact_block = "Mr Foo,\n1 Test Street,\nLondon\nN1" - filename = "opg" - - with set_config_values( - notify_api, - { - "TEMPLATE_PREVIEW_API_HOST": "http://localhost/notifications-template-preview", - "TEMPLATE_PREVIEW_API_KEY": "test-key", - }, - ): - with requests_mock.Mocker() as request_mock: - mock_post = request_mock.post( - "http://localhost/notifications-template-preview/print.pdf", - content=b"\x00\x01", - status_code=200, - ) - - 
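The test around this point drives get_letters_pdf, which rendered a letter by POSTing the template to the template-preview service. A bare-bones sketch of that call, assuming the /print.pdf endpoint and payload shape asserted in these tests, plus a token header like the one the sanitise endpoint uses; the helper name is hypothetical:

import requests


def render_letter_pdf(api_host, api_key, template, contact_block, filename, values):
    response = requests.post(
        "{}/print.pdf".format(api_host),
        json={
            "letter_contact_block": contact_block,
            "template": {"subject": template["subject"], "content": template["content"]},
            "values": values,
            "filename": filename,
        },
        headers={"Authorization": "Token {}".format(api_key)},
    )
    response.raise_for_status()
    return response.content  # rendered PDF bytes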
get_letters_pdf( - sample_letter_template, - contact_block=contact_block, - filename=filename, - values=personalisation, - ) - - assert mock_post.last_request.json() == { - "values": personalisation, - "letter_contact_block": contact_block, - "filename": filename, - "template": { - "subject": sample_letter_template.subject, - "content": sample_letter_template.content, - }, - } - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize("page_count,expected_billable_units", [("1", 1), ("2", 1), ("3", 2)]) -def test_get_letters_pdf_calculates_billing_units( - notify_api, - mocker, - client, - sample_letter_template, - page_count, - expected_billable_units, -): - contact_block = "Mr Foo,\n1 Test Street,\nLondon\nN1" - filename = "opg" - - with set_config_values( - notify_api, - { - "TEMPLATE_PREVIEW_API_HOST": "http://localhost/notifications-template-preview", - "TEMPLATE_PREVIEW_API_KEY": "test-key", - }, - ): - with requests_mock.Mocker() as request_mock: - request_mock.post( - "http://localhost/notifications-template-preview/print.pdf", - content=b"\x00\x01", - headers={"X-pdf-page-count": page_count}, - status_code=200, - ) - - _, billable_units = get_letters_pdf( - sample_letter_template, - contact_block=contact_block, - filename=filename, - values=None, - ) - - assert billable_units == expected_billable_units - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-12-04 17:31:00") -def test_create_letters_pdf_calls_s3upload(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", "1")) - mock_s3 = mocker.patch("app.letters.utils.s3upload") - - create_letters_pdf(sample_letter_notification.id) - - mock_s3.assert_called_with( - bucket_name=current_app.config["LETTERS_PDF_BUCKET_NAME"], - file_location="2017-12-04/NOTIFY.FOO.D.2.C.C.20171204173100.PDF", - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-12-04 17:31:00") -def test_create_letters_pdf_calls_s3upload_for_test_letters(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", "1")) - mock_s3 = mocker.patch("app.letters.utils.s3upload") - sample_letter_notification.key_type = "test" - - create_letters_pdf(sample_letter_notification.id) - - mock_s3.assert_called_with( - bucket_name=current_app.config["TEST_LETTERS_BUCKET_NAME"], - file_location="NOTIFY.FOO.D.2.C.C.20171204173100.PDF", - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_sets_billable_units(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - mocker.patch("app.letters.utils.s3upload") - - create_letters_pdf(sample_letter_notification.id) - noti = Notification.query.filter(Notification.reference == sample_letter_notification.reference).one() - assert noti.billable_units == 1 - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_non_existent_notification(notify_api, mocker, fake_uuid): - with pytest.raises(expected_exception=NoResultFound): - create_letters_pdf(fake_uuid) - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_handles_request_errors(mocker, sample_letter_notification): - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", side_effect=RequestException) - mock_retry = 
mocker.patch("app.celery.letters_pdf_tasks.create_letters_pdf.retry") - - create_letters_pdf(sample_letter_notification.id) - - assert mock_get_letters_pdf.called - assert mock_retry.called - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_handles_s3_errors(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - error_response = { - "Error": { - "Code": "InvalidParameterValue", - "Message": "some error message from amazon", - "Type": "Sender", - } - } - mock_s3 = mocker.patch( - "app.letters.utils.s3upload", - side_effect=ClientError(error_response, "operation_name"), - ) - mock_retry = mocker.patch("app.celery.letters_pdf_tasks.create_letters_pdf.retry") - - create_letters_pdf(sample_letter_notification.id) - - assert mock_s3.called - assert mock_retry.called - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_sets_technical_failure_max_retries(mocker, sample_letter_notification): - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", side_effect=RequestException) - mock_retry = mocker.patch( - "app.celery.letters_pdf_tasks.create_letters_pdf.retry", - side_effect=MaxRetriesExceededError, - ) - mock_update_noti = mocker.patch("app.celery.letters_pdf_tasks.update_notification_status_by_id") - - create_letters_pdf(sample_letter_notification.id) - - assert mock_get_letters_pdf.called - assert mock_retry.called - mock_update_noti.assert_called_once_with(sample_letter_notification.id, "technical-failure") - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_gets_the_right_logo_when_service_has_no_logo(notify_api, mocker, sample_letter_notification): - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - mocker.patch("app.letters.utils.s3upload") - mocker.patch("app.celery.letters_pdf_tasks.update_notification_status_by_id") - - create_letters_pdf(sample_letter_notification.id) - mock_get_letters_pdf.assert_called_once_with( - sample_letter_notification.template, - contact_block=sample_letter_notification.reply_to_text, - filename=None, - values=sample_letter_notification.personalisation, - ) - - -# We only need this while we are migrating to the new letter_branding model -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_gets_the_right_logo_when_service_has_letter_branding_logo(notify_api, mocker, sample_letter_notification): - letter_branding = create_letter_branding(name="test brand", filename="test-brand") - sample_letter_notification.service.letter_branding = letter_branding - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - mocker.patch("app.letters.utils.s3upload") - mocker.patch("app.celery.letters_pdf_tasks.update_notification_status_by_id") - - create_letters_pdf(sample_letter_notification.id) - mock_get_letters_pdf.assert_called_once_with( - sample_letter_notification.template, - contact_block=sample_letter_notification.reply_to_text, - filename=sample_letter_notification.service.letter_branding.filename, - values=sample_letter_notification.personalisation, - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_collate_letter_pdfs_for_day(notify_api, mocker): - mock_s3 = mocker.patch( - "app.celery.tasks.s3.get_s3_bucket_objects", - return_value=[ - {"Key": "B.pDf", "Size": 2}, - {"Key": "A.PDF", "Size": 1}, - {"Key": "C.pdf", "Size": 3}, - ], - ) - 
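test_get_letters_pdf_calculates_billing_units above pins down the duplex arithmetic: a sheet holds two pages, so billable units are the page count divided by two, rounded up. The rule in one line, for reference:

import math


def billable_units(page_count, pages_per_sheet=2):
    """Letters print duplex: 1-2 pages bill as 1 unit, 3-4 as 2, and so on."""
    return math.ceil(page_count / pages_per_sheet)


assert [billable_units(p) for p in (1, 2, 3)] == [1, 1, 2]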
mock_group_letters = mocker.patch( - "app.celery.letters_pdf_tasks.group_letters", - return_value=[ - [{"Key": "A.PDF", "Size": 1}, {"Key": "B.pDf", "Size": 2}], - [{"Key": "C.pdf", "Size": 3}], - ], - ) - mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task") - - collate_letter_pdfs_for_day("2017-01-02") - - mock_s3.assert_called_once_with("test-letters-pdf", subfolder="2017-01-02") - mock_group_letters.assert_called_once_with(sorted(mock_s3.return_value, key=lambda x: x["Key"])) - assert mock_celery.call_args_list[0] == call( - name="zip-and-send-letter-pdfs", - kwargs={ - "filenames_to_zip": ["A.PDF", "B.pDf"], - "upload_filename": "NOTIFY.2017-01-02.001.oqdjIM2-NAUU9Sm5Slmi.ZIP", - }, - queue="process-ftp-tasks", - compression="zlib", - ) - assert mock_celery.call_args_list[1] == call( - name="zip-and-send-letter-pdfs", - kwargs={ - "filenames_to_zip": ["C.pdf"], - "upload_filename": "NOTIFY.2017-01-02.002.tdr7hcdPieiqjkVoS4kU.ZIP", - }, - queue="process-ftp-tasks", - compression="zlib", - ) - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-09-12 17:50:00") -def test_collate_letter_pdfs_for_day_works_without_date_param(notify_api, mocker): - mock_s3 = mocker.patch("app.celery.tasks.s3.get_s3_bucket_objects") - collate_letter_pdfs_for_day() - expected_date = "2018-09-12" - mock_s3.assert_called_once_with("test-letters-pdf", subfolder=expected_date) - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_splits_on_file_size(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [ - # ends under max but next one is too big - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - # ends on exactly max - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - # exactly max goes in next file - {"Key": "F.pdf", "Size": 5}, - # if it's bigger than the max, still gets included - {"Key": "G.pdf", "Size": 6}, - # whatever's left goes in last list - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - - with set_config_values(notify_api, {"MAX_LETTER_PDF_ZIP_FILESIZE": 5}): - x = group_letters(letters) - - assert next(x) == [{"Key": "A.pdf", "Size": 1}, {"Key": "B.pdf", "Size": 2}] - assert next(x) == [ - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - ] - assert next(x) == [{"Key": "F.pdf", "Size": 5}] - assert next(x) == [{"Key": "G.pdf", "Size": 6}] - assert next(x) == [{"Key": "H.pdf", "Size": 1}, {"Key": "I.pdf", "Size": 1}] - # make sure iterator is exhausted - assert next(x, None) is None - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_splits_on_file_count(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [ - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - {"Key": "F.pdf", "Size": 5}, - {"Key": "G.pdf", "Size": 6}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - - with set_config_values(notify_api, {"MAX_LETTER_PDF_COUNT_PER_ZIP": 3}): - x = group_letters(letters) - - assert next(x) == [ - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - {"Key": "C.pdf", "Size": 3}, - ] - assert next(x) == [ - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - {"Key": "F.pdf", "Size": 5}, - ] - assert next(x) == [ - {"Key": "G.pdf", "Size": 
6}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - # make sure iterator is exhausted - assert next(x, None) is None - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_splits_on_file_size_and_file_count(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [ - # ends under max file size but next file is too big - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - # ends on exactly max number of files and file size - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - # exactly max file size goes in next file - {"Key": "F.pdf", "Size": 5}, - # file size is within max but number of files reaches limit - {"Key": "G.pdf", "Size": 1}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - # whatever's left goes in last list - {"Key": "J.pdf", "Size": 1}, - ] - - with set_config_values( - notify_api, - {"MAX_LETTER_PDF_ZIP_FILESIZE": 5, "MAX_LETTER_PDF_COUNT_PER_ZIP": 3}, - ): - x = group_letters(letters) - - assert next(x) == [{"Key": "A.pdf", "Size": 1}, {"Key": "B.pdf", "Size": 2}] - assert next(x) == [ - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - ] - assert next(x) == [{"Key": "F.pdf", "Size": 5}] - assert next(x) == [ - {"Key": "G.pdf", "Size": 1}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - assert next(x) == [{"Key": "J.pdf", "Size": 1}] - # make sure iterator is exhausted - assert next(x, None) is None - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_ignores_non_pdfs(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [{"Key": "A.zip"}] - assert list(group_letters(letters)) == [] - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_ignores_notifications_already_sent(notify_api, mocker): - mock = mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=False) - letters = [{"Key": "A.pdf"}] - assert list(group_letters(letters)) == [] - mock.assert_called_once_with("A.pdf") - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_with_no_letters(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - assert list(group_letters([])) == [] - - -@pytest.mark.skip(reason="Letter tests") -def test_letter_in_created_state(sample_notification): - sample_notification.reference = "ABCDEF1234567890" - filename = "2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF" - - assert letter_in_created_state(filename) is True - - -@pytest.mark.skip(reason="Letter tests") -def test_letter_in_created_state_fails_if_notification_not_in_created( - sample_notification, -): - sample_notification.reference = "ABCDEF1234567890" - sample_notification.status = NOTIFICATION_SENDING - filename = "2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF" - assert letter_in_created_state(filename) is False - - -@pytest.mark.skip(reason="Letter tests") -def test_letter_in_created_state_fails_if_notification_doesnt_exist( - sample_notification, -): - sample_notification.reference = "QWERTY1234567890" - filename = "2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF" - assert letter_in_created_state(filename) is False - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize( - 
"key_type,noti_status,bucket_config_name,destination_folder", - [ - ( - KEY_TYPE_NORMAL, - NOTIFICATION_CREATED, - "LETTERS_PDF_BUCKET_NAME", - "2018-01-01/", - ), - (KEY_TYPE_TEST, NOTIFICATION_DELIVERED, "TEST_LETTERS_BUCKET_NAME", ""), - ], -) -def test_process_letter_task_check_virus_scan_passed( - sample_letter_template, - mocker, - key_type, - noti_status, - bucket_config_name, - destination_folder, -): - letter_notification = save_notification( - create_notification( - template=sample_letter_template, - billable_units=0, - status="pending-virus-check", - key_type=key_type, - reference="{} letter".format(key_type), - ) - ) - filename = "NOTIFY.{}".format(letter_notification.reference) - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config[bucket_config_name] - - conn = boto3.resource("s3") - conn.create_bucket(Bucket=source_bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"old_pdf") - - mock_get_page_count = mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=1) - mock_s3upload = mocker.patch("app.celery.letters_pdf_tasks.s3upload") - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - process_virus_scan_passed(filename) - - assert letter_notification.status == noti_status - assert letter_notification.billable_units == 1 - assert rmock.called - assert rmock.request_history[0].url == endpoint - - mock_s3upload.assert_called_once_with( - bucket_name=target_bucket_name, - filedata=b"new_pdf", - file_location=destination_folder + filename, - region="ca-central-1", - ) - mock_get_page_count.assert_called_once_with(b"old_pdf") - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize("key_type", [KEY_TYPE_NORMAL, KEY_TYPE_TEST]) -def test_process_letter_task_check_virus_scan_passed_when_sanitise_fails(sample_letter_notification, mocker, key_type): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config["INVALID_PDF_BUCKET_NAME"] - - conn = boto3.resource("s3") - conn.create_bucket(Bucket=source_bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"pdf_content") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - sample_letter_notification.key_type = key_type - mock_move_s3 = mocker.patch("app.letters.utils._move_s3_object") - mock_sanitise = mocker.patch("app.celery.letters_pdf_tasks._sanitise_precompiled_pdf", return_value=None) - mock_get_page_count = mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=2) - - process_virus_scan_passed(filename) - - assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED - assert sample_letter_notification.billable_units == 0 - mock_sanitise.assert_called_once_with(ANY, sample_letter_notification, b"pdf_content") - mock_move_s3.assert_called_once_with(source_bucket_name, 
filename, target_bucket_name, filename) - - mock_get_page_count.assert_called_once_with(b"pdf_content") - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize( - "key_type,notification_status,bucket_config_name", - [ - (KEY_TYPE_NORMAL, NOTIFICATION_CREATED, "LETTERS_PDF_BUCKET_NAME"), - (KEY_TYPE_TEST, NOTIFICATION_DELIVERED, "TEST_LETTERS_BUCKET_NAME"), - ], -) -def test_process_letter_task_check_virus_scan_passed_when_redaction_fails( - sample_letter_notification, - mocker, - key_type, - notification_status, - bucket_config_name, -): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config[bucket_config_name] - - conn = boto3.resource("s3") - conn.create_bucket(Bucket=bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - sample_letter_notification.key_type = key_type - mock_copy_s3 = mocker.patch("app.letters.utils._copy_s3_object") - mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=2) - - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "redaction_failed_message": "No matches for address block during redaction procedure", - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - process_virus_scan_passed(filename) - - assert sample_letter_notification.billable_units == 2 - assert sample_letter_notification.status == notification_status - if key_type == KEY_TYPE_NORMAL: - mock_copy_s3.assert_called_once_with(bucket_name, filename, bucket_name, "REDACTION_FAILURE/" + filename) - else: - mock_copy_s3.assert_not_called() - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize("key_type", [KEY_TYPE_NORMAL, KEY_TYPE_TEST]) -def test_process_letter_task_check_virus_scan_passed_when_file_cannot_be_opened(sample_letter_notification, mocker, key_type): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config["INVALID_PDF_BUCKET_NAME"] - - conn = boto3.resource("s3") - conn.create_bucket(Bucket=source_bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"pdf_content") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - sample_letter_notification.key_type = key_type - mock_move_s3 = mocker.patch("app.letters.utils._move_s3_object") - - mock_get_page_count = mocker.patch("app.celery.letters_pdf_tasks.get_page_count", side_effect=PdfReadError) - mock_sanitise = mocker.patch("app.celery.letters_pdf_tasks._sanitise_precompiled_pdf") - - process_virus_scan_passed(filename) - - mock_sanitise.assert_not_called() - mock_get_page_count.assert_called_once_with(b"pdf_content") - mock_move_s3.assert_called_once_with(source_bucket_name, filename, target_bucket_name, filename) - assert sample_letter_notification.status == 
NOTIFICATION_VALIDATION_FAILED - assert sample_letter_notification.billable_units == 0 - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -def test_process_virus_scan_passed_logs_error_and_sets_tech_failure_if_s3_error_uploading_to_live_bucket( - mocker, - sample_letter_notification, -): - mock_logger = mocker.patch("app.celery.tasks.current_app.logger.exception") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - conn = boto3.resource("s3") - conn.create_bucket(Bucket=source_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"pdf_content") - - mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=1) - - error_response = { - "Error": { - "Code": "InvalidParameterValue", - "Message": "some error message from amazon", - "Type": "Sender", - } - } - mocker.patch( - "app.celery.letters_pdf_tasks._upload_pdf_to_test_or_live_pdf_bucket", - side_effect=ClientError(error_response, "operation_name"), - ) - - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - process_virus_scan_passed(filename) - - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE - mock_logger.assert_called_once_with( - "Error uploading letter to live pdf bucket for notification: {}".format(sample_letter_notification.id) - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_move_invalid_letter_and_update_status_logs_error_and_sets_tech_failure_state_if_s3_error( - mocker, - sample_letter_notification, -): - error_response = { - "Error": { - "Code": "InvalidParameterValue", - "Message": "some error message from amazon", - "Type": "Sender", - } - } - mocker.patch( - "app.celery.letters_pdf_tasks.move_scan_to_invalid_pdf_bucket", - side_effect=ClientError(error_response, "operation_name"), - ) - mock_logger = mocker.patch("app.celery.tasks.current_app.logger.exception") - - _move_invalid_letter_and_update_status(sample_letter_notification, "filename", mocker.Mock()) - - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE - mock_logger.assert_called_once_with( - "Error when moving letter with id {} to invalid PDF bucket".format(sample_letter_notification.id) - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_process_letter_task_check_virus_scan_failed(sample_letter_notification, mocker): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - mock_move_failed_pdf = mocker.patch("app.celery.letters_pdf_tasks.move_failed_pdf") - - with pytest.raises(VirusScanError) as e: - process_virus_scan_failed(filename) - - assert "Virus scan failed:" in str(e) - mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.FAILURE) - assert sample_letter_notification.status == NOTIFICATION_VIRUS_SCAN_FAILED - - -@pytest.mark.skip(reason="Letter tests") -def test_process_letter_task_check_virus_scan_error(sample_letter_notification, mocker): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - 
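The scan-failure tests here rely on a naming convention from app/letters/utils.py: failed scans stay in the scan bucket but move under an ERROR/ or FAILURE/ prefix, and redaction problems are copied under REDACTION_FAILURE/ with the original left in place. A pure-Python sketch of the key naming, with a trimmed ScanErrorType:

from enum import Enum


class ScanErrorType(Enum):
    ERROR = 1
    FAILURE = 2


def failed_pdf_key(source_filename, scan_error_type):
    """Key the object moves to within the scan bucket after a failed virus scan."""
    prefix = "ERROR/" if scan_error_type == ScanErrorType.ERROR else "FAILURE/"
    return prefix + source_filename


def redaction_failed_copy_key(source_filename):
    """Key the object is copied to when address redaction fails (the original stays)."""
    return "REDACTION_FAILURE/" + source_filename


assert failed_pdf_key("NOTIFY.REF.PDF", ScanErrorType.FAILURE) == "FAILURE/NOTIFY.REF.PDF"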
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - mock_move_failed_pdf = mocker.patch("app.celery.letters_pdf_tasks.move_failed_pdf") - - with pytest.raises(VirusScanError) as e: - process_virus_scan_error(filename) - - assert "Virus scan error:" in str(e.value) - mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.ERROR) - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE - - -@pytest.mark.skip(reason="Letter tests") -def test_replay_letters_in_error_for_all_letters_in_error_bucket(notify_api, mocker): - mockObject = boto3.resource("s3").Object("ERROR", "ERROR/file_name") - mocker.patch( - "app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", - return_value=[mockObject], - ) - mock_move = mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket") - mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task") - replay_letters_in_error() - mock_move.assert_called_once_with("file_name") - mock_celery.assert_called_once_with(name="scan-file", kwargs={"filename": "file_name"}, queue="antivirus-tasks") - - -@pytest.mark.skip(reason="Letter tests") -def test_replay_letters_in_error_for_one_file(notify_api, mocker): - mockObject = boto3.resource("s3").Object("ERROR", "ERROR/file_name") - mocker.patch( - "app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", - return_value=[mockObject], - ) - mock_move = mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket") - mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task") - replay_letters_in_error("file_name") - mock_move.assert_called_once_with("file_name") - mock_celery.assert_called_once_with(name="scan-file", kwargs={"filename": "file_name"}, queue="antivirus-tasks") - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_returns_data_from_template_preview(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - mock_celery = Mock(**{"retry.side_effect": Retry}) - response = _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - assert rmock.called - assert rmock.request_history[0].url == endpoint - - assert base64.b64decode(response.json()["file"].encode()) == b"new_pdf" - assert rmock.last_request.text == "old_pdf" - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_returns_none_on_validation_error(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"nyan").decode("utf-8"), - "validation_passed": False, - "errors": { - "content_outside_of_printable_area": [1], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=400, - ) - mock_celery = Mock(**{"retry.side_effect": Retry}) - response = _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - assert rmock.called - assert rmock.request_history[0].url == endpoint - - assert 
response is None - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_passes_the_service_id_and_notification_id_to_template_preview( - mocker, - sample_letter_notification, -): - tp_mock = mocker.patch("app.celery.letters_pdf_tasks.requests_post") - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - mock_celery = Mock(**{"retry.side_effect": Retry}) - _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - - service_id = str(sample_letter_notification.service_id) - notification_id = str(sample_letter_notification.id) - - tp_mock.assert_called_once_with( - "http://localhost:9999/precompiled/sanitise", - data=b"old_pdf", - headers={ - "Authorization": "Token my-secret-key", - "Service-ID": service_id, - "Notification-ID": notification_id, - }, - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_retries_on_http_error(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - rmock.post( - "http://localhost:9999/precompiled/sanitise", - content=b"new_pdf", - status_code=500, - ) - mock_celery = Mock(**{"retry.side_effect": Retry}) - - with pytest.raises(Retry): - _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_sets_notification_to_technical_failure_after_too_many_errors(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - rmock.post( - "http://localhost:9999/precompiled/sanitise", - content=b"new_pdf", - status_code=500, - ) - mock_celery = Mock(**{"retry.side_effect": MaxRetriesExceededError}) - - with pytest.raises(MaxRetriesExceededError): - _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py index 581da5add7..2601522739 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py +++ b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py @@ -1,7 +1,6 @@ from datetime import date, datetime, timedelta import pytest -from flask import current_app from freezegun import freeze_time from app.dao.notifications_dao import ( @@ -99,7 +98,7 @@ def _create_templates(sample_service): @pytest.mark.parametrize("month, delete_run_time", [(4, "2016-04-10 23:40"), (1, "2016-01-11 00:40")]) @pytest.mark.parametrize( "notification_type, expected_sms_count, expected_email_count, expected_letter_count", - [("sms", 7, 10, 10), ("email", 10, 7, 10), ("letter", 10, 10, 7)], + [("sms", 7, 10, 10), ("email", 10, 7, 10)], ) def test_should_delete_notifications_by_type_after_seven_days( sample_service, @@ -111,7 +110,6 @@ def test_should_delete_notifications_by_type_after_seven_days( expected_email_count, expected_letter_count, ): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") email_template, letter_template, sms_template = _create_templates(sample_service) # create one notification a day between 1st and 10th from 11:00 to 19:00 of each type for i in range(1, 11): @@ -157,7 +155,6 @@ def test_should_delete_notifications_by_type_after_seven_days( @freeze_time("2016-01-10 12:00:00.000000") def test_should_not_delete_notification_history(sample_service, mocker): 
- mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") with freeze_time("2016-01-01 12:00"): email_template, letter_template, sms_template = _create_templates(sample_service) save_notification(create_notification(template=email_template, status="permanent-failure")) @@ -169,22 +166,13 @@ def test_should_not_delete_notification_history(sample_service, mocker): assert NotificationHistory.query.count() == 1 -@pytest.mark.parametrize("notification_type", ["sms", "email", "letter"]) +@pytest.mark.parametrize("notification_type", ["sms", "email"]) def test_delete_notifications_for_days_of_retention(sample_service, notification_type, mocker): - mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") create_test_data(notification_type, sample_service) assert Notification.query.count() == 9 delete_notifications_older_than_retention_by_type(notification_type) assert Notification.query.count() == 7 assert Notification.query.filter_by(notification_type=notification_type).count() == 1 - if notification_type == "letter": - mock_get_s3.assert_called_with( - bucket_name=current_app.config["LETTERS_PDF_BUCKET_NAME"], - subfolder="{}/NOTIFY.LETTER_REF.D.2.C.C".format(str(datetime.utcnow().date())), - ) - assert mock_get_s3.call_count == 2 - else: - mock_get_s3.assert_not_called() def test_delete_notifications_inserts_notification_history(sample_service): @@ -197,7 +185,6 @@ def test_delete_notifications_inserts_notification_history(sample_service): def test_delete_notifications_updates_notification_history(sample_email_template, mocker): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") notification = save_notification( create_notification(template=sample_email_template, created_at=datetime.utcnow() - timedelta(days=8)) ) @@ -232,7 +219,6 @@ def test_delete_notifications_keep_data_for_days_of_retention_is_longer(sample_s def test_delete_notifications_with_test_keys(sample_template, mocker): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") save_notification( create_notification( template=sample_template, @@ -279,18 +265,8 @@ def test_delete_notifications_delete_notification_type_for_default_time_if_no_da assert Notification.query.filter_by(notification_type="email").count() == 1 -def test_delete_notifications_does_try_to_delete_from_s3_when_letter_has_not_been_sent(sample_service, mocker): - mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") - letter_template = create_template(service=sample_service, template_type="letter") - - save_notification(create_notification(template=letter_template, status="sending", reference="LETTER_REF")) - delete_notifications_older_than_retention_by_type("email", qry_limit=1) - mock_get_s3.assert_not_called() - - @freeze_time("2016-01-10 12:00:00.000000") def test_should_not_delete_notification_if_history_does_not_exist(sample_service, mocker): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") mocker.patch("app.dao.notifications_dao.insert_update_notification_history") with freeze_time("2016-01-01 12:00"): email_template, letter_template, sms_template = _create_templates(sample_service) diff --git a/tests/app/letters/__init__.py b/tests/app/letters/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/app/letters/test_letter_utils.py b/tests/app/letters/test_letter_utils.py deleted file mode 100644 index ad4c05e56f..0000000000 --- a/tests/app/letters/test_letter_utils.py +++ /dev/null @@ -1,402 +0,0 @@ -from datetime import datetime - 
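The deleted test_letter_utils.py around this point mostly exercised one routing rule from app/letters/utils.py: which bucket a letter PDF lives in. A condensed sketch of that rule, with plain strings standing in for the model constants and config bucket names:

def bucket_for_letter(status, key_type):
    """Condensed routing; the real code read bucket names from current_app.config."""
    if status == "validation-failed":
        return "INVALID_PDF_BUCKET_NAME"
    if key_type == "test":
        return "TEST_LETTERS_BUCKET_NAME"
    return "LETTERS_PDF_BUCKET_NAME"


assert bucket_for_letter("created", "normal") == "LETTERS_PDF_BUCKET_NAME"
assert bucket_for_letter("validation-failed", "test") == "INVALID_PDF_BUCKET_NAME"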
-import boto3 -import pytest -from flask import current_app -from freezegun import freeze_time -from moto import mock_s3 - -from app.letters.utils import ( - ScanErrorType, - copy_redaction_failed_pdf, - get_bucket_name_and_prefix_for_notification, - get_folder_name, - get_letter_pdf, - get_letter_pdf_filename, - letter_print_day, - move_failed_pdf, - upload_letter_pdf, -) -from app.models import ( - KEY_TYPE_NORMAL, - KEY_TYPE_TEST, - NOTIFICATION_VALIDATION_FAILED, - PRECOMPILED_TEMPLATE_NAME, -) -from tests.app.db import create_notification, save_notification - -FROZEN_DATE_TIME = "2018-03-14 17:00:00" - - -@pytest.mark.skip(reason="Letter tests") -@pytest.fixture(name="sample_precompiled_letter_notification") -def _sample_precompiled_letter_notification(sample_letter_notification): - sample_letter_notification.template.hidden = True - sample_letter_notification.template.name = PRECOMPILED_TEMPLATE_NAME - sample_letter_notification.reference = "foo" - with freeze_time(FROZEN_DATE_TIME): - sample_letter_notification.created_at = datetime.utcnow() - sample_letter_notification.updated_at = datetime.utcnow() - return sample_letter_notification - - -@pytest.mark.skip(reason="Letter tests") -@pytest.fixture(name="sample_precompiled_letter_notification_using_test_key") -def _sample_precompiled_letter_notification_using_test_key( - sample_precompiled_letter_notification, -): - sample_precompiled_letter_notification.key_type = KEY_TYPE_TEST - return sample_precompiled_letter_notification - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "created_at,folder", - [ - (datetime(2017, 1, 1, 17, 29), "2017-01-01"), - (datetime(2017, 1, 1, 17, 31), "2017-01-02"), - ], -) -@pytest.mark.skip(reason="Letter feature") -def test_get_bucket_name_and_prefix_for_notification_valid_notification(sample_notification, created_at, folder): - sample_notification.created_at = created_at - sample_notification.updated_at = created_at - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config["LETTERS_PDF_BUCKET_NAME"] - assert bucket_prefix == "{folder}/NOTIFY.{reference}".format(folder=folder, reference=sample_notification.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -def test_get_bucket_name_and_prefix_for_notification_get_from_sent_at_date( - sample_notification, -): - sample_notification.created_at = datetime(2019, 8, 1, 17, 35) - sample_notification.sent_at = datetime(2019, 8, 2, 17, 45) - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config["LETTERS_PDF_BUCKET_NAME"] - assert ( - bucket_prefix - == "{folder}/NOTIFY.{reference}".format(folder="2019-08-02", reference=sample_notification.reference).upper() - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_get_bucket_name_and_prefix_for_notification_from_created_at_date( - sample_notification, -): - sample_notification.created_at = datetime(2019, 8, 1, 12, 00) - sample_notification.updated_at = datetime(2019, 8, 2, 12, 00) - sample_notification.sent_at = datetime(2019, 8, 3, 12, 00) - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config["LETTERS_PDF_BUCKET_NAME"] - assert ( - bucket_prefix - == "{folder}/NOTIFY.{reference}".format(folder="2019-08-03", reference=sample_notification.reference).upper() - ) - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time(FROZEN_DATE_TIME) -def 
test_get_bucket_name_and_prefix_for_notification_precompiled_letter_using_test_key( - sample_precompiled_letter_notification_using_test_key, -): - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification_using_test_key) - - assert bucket == current_app.config["TEST_LETTERS_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_precompiled_letter_notification_using_test_key.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_notification_templated_letter_using_test_key( - sample_letter_notification, -): - sample_letter_notification.key_type = KEY_TYPE_TEST - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_letter_notification) - - assert bucket == current_app.config["TEST_LETTERS_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_letter_notification.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_failed_validation( - sample_precompiled_letter_notification, -): - sample_precompiled_letter_notification.status = NOTIFICATION_VALIDATION_FAILED - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification) - - assert bucket == current_app.config["INVALID_PDF_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_precompiled_letter_notification.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_test_noti_with_failed_validation( - sample_precompiled_letter_notification_using_test_key, -): - sample_precompiled_letter_notification_using_test_key.status = NOTIFICATION_VALIDATION_FAILED - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification_using_test_key) - - assert bucket == current_app.config["INVALID_PDF_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_precompiled_letter_notification_using_test_key.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -def test_get_bucket_name_and_prefix_for_notification_invalid_notification(): - with pytest.raises(AttributeError): - get_bucket_name_and_prefix_for_notification(None) - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "crown_flag,expected_crown_text", - [ - (True, "C"), - (False, "N"), - ], -) -@freeze_time("2017-12-04 17:29:00") -def test_get_letter_pdf_filename_returns_correct_filename(notify_api, mocker, crown_flag, expected_crown_text): - filename = get_letter_pdf_filename(reference="foo", crown=crown_flag) - - assert filename == "2017-12-04/NOTIFY.FOO.D.2.C.{}.20171204172900.PDF".format(expected_crown_text) - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "postage,expected_postage", - [ - ("second", 2), - ("first", 1), - ], -) -@freeze_time("2017-12-04 17:29:00") -def test_get_letter_pdf_filename_returns_correct_postage_for_filename(notify_api, postage, expected_postage): - filename = get_letter_pdf_filename(reference="foo", crown=True, postage=postage) - - assert filename == "2017-12-04/NOTIFY.FOO.D.{}.C.C.20171204172900.PDF".format(expected_postage) - - -@freeze_time("2017-12-04 17:29:00") -def test_get_letter_pdf_filename_returns_correct_filename_for_test_letters(notify_api, mocker): - filename = get_letter_pdf_filename(reference="foo", crown="C", is_scan_letter=True) - - assert filename == 
"NOTIFY.FOO.D.2.C.C.20171204172900.PDF" - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-12-04 17:31:00") -@pytest.mark.skip(reason="Letter feature") -def test_get_letter_pdf_filename_returns_tomorrows_filename(notify_api, mocker): - filename = get_letter_pdf_filename(reference="foo", crown=True) - - assert filename == "2017-12-05/NOTIFY.FOO.D.2.C.C.20171204173100.PDF" - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -@pytest.mark.parametrize( - "bucket_config_name,filename_format", - [ - ("TEST_LETTERS_BUCKET_NAME", "NOTIFY.FOO.D.2.C.C.%Y%m%d%H%M%S.PDF"), - ("LETTERS_PDF_BUCKET_NAME", "%Y-%m-%d/NOTIFY.FOO.D.2.C.C.%Y%m%d%H%M%S.PDF"), - ], -) -@freeze_time(FROZEN_DATE_TIME) -def test_get_letter_pdf_gets_pdf_from_correct_bucket( - sample_precompiled_letter_notification_using_test_key, - bucket_config_name, - filename_format, -): - if bucket_config_name == "LETTERS_PDF_BUCKET_NAME": - sample_precompiled_letter_notification_using_test_key.key_type = KEY_TYPE_NORMAL - - bucket_name = current_app.config[bucket_config_name] - filename = datetime.utcnow().strftime(filename_format) - conn = boto3.resource("s3") - conn.create_bucket(Bucket=bucket_name) - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - ret = get_letter_pdf(sample_precompiled_letter_notification_using_test_key) - - assert ret == b"pdf_content" - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "is_precompiled_letter,bucket_config_name", - [(False, "LETTERS_PDF_BUCKET_NAME"), (True, "LETTERS_SCAN_BUCKET_NAME")], -) -def test_upload_letter_pdf_to_correct_bucket(sample_letter_notification, mocker, is_precompiled_letter, bucket_config_name): - if is_precompiled_letter: - sample_letter_notification.template.hidden = True - sample_letter_notification.template.name = PRECOMPILED_TEMPLATE_NAME - - mock_s3 = mocker.patch("app.letters.utils.s3upload") - - filename = get_letter_pdf_filename( - reference=sample_letter_notification.reference, - crown=sample_letter_notification.service.crown, - is_scan_letter=is_precompiled_letter, - ) - - upload_letter_pdf(sample_letter_notification, b"\x00\x01", precompiled=is_precompiled_letter) - - mock_s3.assert_called_once_with( - bucket_name=current_app.config[bucket_config_name], - file_location=filename, - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize("postage,expected_postage", [("second", 2), ("first", 1)]) -def test_upload_letter_pdf_uses_postage_from_notification(sample_letter_template, mocker, postage, expected_postage): - letter_notification = save_notification(create_notification(template=sample_letter_template, postage=postage)) - mock_s3 = mocker.patch("app.letters.utils.s3upload") - - filename = get_letter_pdf_filename( - reference=letter_notification.reference, - crown=letter_notification.service.crown, - is_scan_letter=False, - postage=letter_notification.postage, - ) - - upload_letter_pdf(letter_notification, b"\x00\x01", precompiled=False) - - mock_s3.assert_called_once_with( - bucket_name=current_app.config["LETTERS_PDF_BUCKET_NAME"], - file_location=filename, - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_move_failed_pdf_error(notify_api): - filename = "test.pdf" - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - conn = boto3.resource("s3") 
- bucket = conn.create_bucket(Bucket=bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - move_failed_pdf(filename, ScanErrorType.ERROR) - - assert "ERROR/" + filename in [o.key for o in bucket.objects.all()] - assert filename not in [o.key for o in bucket.objects.all()] - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_move_failed_pdf_scan_failed(notify_api): - filename = "test.pdf" - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - conn = boto3.resource("s3") - bucket = conn.create_bucket(Bucket=bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - move_failed_pdf(filename, ScanErrorType.FAILURE) - - assert "FAILURE/" + filename in [o.key for o in bucket.objects.all()] - assert filename not in [o.key for o in bucket.objects.all()] - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_copy_redaction_failed_pdf(notify_api): - filename = "test.pdf" - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - conn = boto3.resource("s3") - bucket = conn.create_bucket(Bucket=bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - copy_redaction_failed_pdf(filename) - - assert "REDACTION_FAILURE/" + filename in [o.key for o in bucket.objects.all()] - assert filename in [o.key for o in bucket.objects.all()] - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "freeze_date, expected_folder_name", - [ - ("2018-04-01 17:50:00", "2018-04-02/"), - ("2018-07-02 16:29:00", "2018-07-02/"), - ("2018-07-02 16:30:00", "2018-07-02/"), - ("2018-07-02 16:31:00", "2018-07-03/"), - ("2018-01-02 16:31:00", "2018-01-02/"), - ("2018-01-02 17:31:00", "2018-01-03/"), - ("2018-07-02 22:30:00", "2018-07-03/"), - ("2018-07-02 23:30:00", "2018-07-03/"), - ("2018-07-03 00:30:00", "2018-07-03/"), - ("2018-01-02 22:30:00", "2018-01-03/"), - ("2018-01-02 23:30:00", "2018-01-03/"), - ("2018-01-03 00:30:00", "2018-01-03/"), - ], -) -@pytest.mark.skip(reason="Letter feature") -def test_get_folder_name_in_british_summer_time(notify_api, freeze_date, expected_folder_name): - with freeze_time(freeze_date): - now = datetime.utcnow() - folder_name = get_folder_name(_now=now, is_test_or_scan_letter=False) - assert folder_name == expected_folder_name - - -@pytest.mark.skip(reason="Letter tests") -def test_get_folder_name_returns_empty_string_for_test_letter(): - assert "" == get_folder_name(datetime.utcnow(), is_test_or_scan_letter=True) - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-07-07 20:00:00") -@pytest.mark.skip(reason="Letter feature") -def test_letter_print_day_returns_today_if_letter_was_printed_after_1730_yesterday(): - created_at = datetime(2017, 7, 6, 17, 30) - assert letter_print_day(created_at) == "today" - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-07-07 16:30:00") -def test_letter_print_day_returns_today_if_letter_was_printed_today(): - created_at = datetime(2017, 7, 7, 12, 0) - assert letter_print_day(created_at) == "today" - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "created_at, formatted_date", - [ - (datetime(2017, 7, 5, 16, 30), "on 6 July"), - (datetime(2017, 7, 6, 16, 29), "on 6 July"), - (datetime(2016, 8, 8, 10, 00), "on 8 August"), - (datetime(2016, 
12, 12, 17, 29), "on 12 December"), - (datetime(2016, 12, 12, 17, 30), "on 13 December"), - ], -) -@freeze_time("2017-07-07 16:30:00") -@pytest.mark.skip(reason="Letter feature") -def test_letter_print_day_returns_formatted_date_if_letter_printed_before_1730_yesterday(created_at, formatted_date): - assert letter_print_day(created_at) == formatted_date diff --git a/tests/app/letters/test_returned_letters.py b/tests/app/letters/test_returned_letters.py deleted file mode 100644 index 7cdf223641..0000000000 --- a/tests/app/letters/test_returned_letters.py +++ /dev/null @@ -1,27 +0,0 @@ -import pytest - - -@pytest.mark.skip(reason="Deprecated: LETTER CODE") -@pytest.mark.parametrize( - "status, references", - [ - (200, ["1234567890ABCDEF", "1234567890ABCDEG"]), - (400, ["1234567890ABCDEFG", "1234567890ABCDEG"]), - (400, ["1234567890ABCDE", "1234567890ABCDEG"]), - (400, ["1234567890ABCDE\u26d4", "1234567890ABCDEG"]), - (400, ["NOTIFY0001234567890ABCDEF", "1234567890ABCDEG"]), - ], -) -def test_process_returned_letters(status, references, admin_request, mocker): - mock_celery = mocker.patch("app.letters.rest.process_returned_letters_list.apply_async") - - response = admin_request.post( - "letter-job.create_process_returned_letters_job", - _data={"references": references}, - _expected_status=status, - ) - - if status != 200: - assert "{} does not match".format(references[0]) in response["errors"][0]["message"] - else: - mock_celery.assert_called_once_with([references], queue="database-tasks") diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 342943b433..4c2069a6e2 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -38,7 +38,6 @@ Notification, Service, ServiceEmailReplyTo, - ServiceLetterContact, ServicePermission, ServiceSmsSender, User, @@ -57,7 +56,6 @@ create_ft_notification_status, create_inbound_number, create_letter_branding, - create_letter_contact, create_notification, create_organisation, create_reply_to_email, @@ -662,47 +660,6 @@ def test_cant_update_service_org_type_to_random_value(client, sample_service): assert resp.status_code == 500 -def test_update_service_letter_branding(client, notify_db, sample_service): - letter_branding = create_letter_branding(name="test brand", filename="test-brand") - data = {"letter_branding": str(letter_branding.id)} - - auth_header = create_authorization_header() - - resp = client.post( - "/service/{}".format(sample_service.id), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - result = resp.json - assert resp.status_code == 200 - assert result["data"]["letter_branding"] == str(letter_branding.id) - - -def test_update_service_remove_letter_branding(client, notify_db, sample_service): - letter_branding = create_letter_branding(name="test brand", filename="test-brand") - sample_service - data = {"letter_branding": str(letter_branding.id)} - - auth_header = create_authorization_header() - - client.post( - "/service/{}".format(sample_service.id), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - - data = {"letter_branding": None} - resp = client.post( - "/service/{}".format(sample_service.id), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - - result = resp.json - assert resp.status_code == 200 - assert result["data"]["letter_branding"] is None - - def test_update_service_remove_email_branding(admin_request, notify_db, sample_service): brand = 
EmailBranding(colour="#000000", logo="justice-league.png", name="Justice League") sample_service.email_branding = brand @@ -2429,25 +2386,6 @@ def test_send_one_off_notification(sample_service, admin_request, mocker): assert response["id"] == str(noti.id) -def test_create_pdf_letter(mocker, sample_service_full_permissions, client, fake_uuid, notify_user): - mocker.patch("app.service.send_notification.utils_s3download") - mocker.patch("app.service.send_notification.get_page_count", return_value=1) - mocker.patch("app.service.send_notification.move_uploaded_pdf_to_letters_bucket") - - user = sample_service_full_permissions.users[0] - data = json.dumps({"filename": "valid.pdf", "created_by": str(user.id), "file_id": fake_uuid}) - - response = client.post( - url_for("service.create_pdf_letter", service_id=sample_service_full_permissions.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - json_resp = json.loads(response.get_data(as_text=True)) - - assert response.status_code == 201 - assert json_resp == {"id": fake_uuid} - - def test_get_notification_for_service_includes_template_redacted(admin_request, sample_notification): resp = admin_request.get( "service.get_notification_for_service", @@ -2963,218 +2901,6 @@ def test_get_email_reply_to_address(client, notify_db, notify_db_session): assert json.loads(response.get_data(as_text=True)) == reply_to.serialize() -def test_get_letter_contacts_when_there_are_no_letter_contacts(client, sample_service): - response = client.get( - "/service/{}/letter-contact".format(sample_service.id), - headers=[create_authorization_header()], - ) - - assert json.loads(response.get_data(as_text=True)) == [] - assert response.status_code == 200 - - -def test_get_letter_contacts_with_one_letter_contact(client, notify_db, notify_db_session): - service = create_service() - create_letter_contact(service, "Aberdeen, AB23 1XH") - - response = client.get( - "/service/{}/letter-contact".format(service.id), - headers=[create_authorization_header()], - ) - json_response = json.loads(response.get_data(as_text=True)) - - assert len(json_response) == 1 - assert json_response[0]["contact_block"] == "Aberdeen, AB23 1XH" - assert json_response[0]["is_default"] - assert json_response[0]["created_at"] - assert not json_response[0]["updated_at"] - assert response.status_code == 200 - - -def test_get_letter_contacts_with_multiple_letter_contacts(client, notify_db, notify_db_session): - service = create_service() - letter_contact_a = create_letter_contact(service, "Aberdeen, AB23 1XH") - letter_contact_b = create_letter_contact(service, "London, E1 8QS", False) - - response = client.get( - "/service/{}/letter-contact".format(service.id), - headers=[create_authorization_header()], - ) - json_response = json.loads(response.get_data(as_text=True)) - - assert len(json_response) == 2 - assert response.status_code == 200 - - assert json_response[0]["id"] == str(letter_contact_a.id) - assert json_response[0]["service_id"] == str(letter_contact_a.service_id) - assert json_response[0]["contact_block"] == "Aberdeen, AB23 1XH" - assert json_response[0]["is_default"] - assert json_response[0]["created_at"] - assert not json_response[0]["updated_at"] - - assert json_response[1]["id"] == str(letter_contact_b.id) - assert json_response[1]["service_id"] == str(letter_contact_b.service_id) - assert json_response[1]["contact_block"] == "London, E1 8QS" - assert not json_response[1]["is_default"] - assert json_response[1]["created_at"] - assert not 
json_response[1]["updated_at"] - - -def test_get_letter_contact_by_id(client, notify_db, notify_db_session): - service = create_service() - letter_contact = create_letter_contact(service, "London, E1 8QS") - - response = client.get( - "/service/{}/letter-contact/{}".format(service.id, letter_contact.id), - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 200 - assert json.loads(response.get_data(as_text=True)) == letter_contact.serialize() - - -def test_get_letter_contact_return_404_when_invalid_contact_id(client, notify_db, notify_db_session): - service = create_service() - - response = client.get( - "/service/{}/letter-contact/{}".format(service.id, "93d59f88-4aa1-453c-9900-f61e2fc8a2de"), - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 404 - - -def test_add_service_contact_block(client, sample_service): - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - response = client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 201 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 1 - assert json_resp["data"] == results[0].serialize() - - -def test_add_service_letter_contact_can_add_multiple_addresses(client, sample_service): - first = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=first, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - second = json.dumps({"contact_block": "Aberdeen, AB23 1XH", "is_default": True}) - response = client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=second, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - assert response.status_code == 201 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 2 - default = [x for x in results if x.is_default] - assert json_resp["data"] == default[0].serialize() - first_letter_contact_not_default = [x for x in results if not x.is_default] - assert first_letter_contact_not_default[0].contact_block == "London, E1 8QS" - - -def test_add_service_letter_contact_block_fine_if_no_default(client, sample_service): - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": False}) - response = client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - assert response.status_code == 201 - - -def test_add_service_letter_contact_block_404s_when_invalid_service_id(client, notify_db, notify_db_session): - response = client.post( - "/service/{}/letter-contact".format(uuid.uuid4()), - data={}, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 404 - result = json.loads(response.get_data(as_text=True)) - assert result["result"] == "error" - assert result["message"] == "No result found" - - -def test_update_service_letter_contact(client, sample_service): - original_letter_contact = create_letter_contact(service=sample_service, contact_block="Aberdeen, AB23 1XH") - data = json.dumps({"contact_block": 
"London, E1 8QS", "is_default": True}) - response = client.post( - "/service/{}/letter-contact/{}".format(sample_service.id, original_letter_contact.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 200 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 1 - assert json_resp["data"] == results[0].serialize() - - -def test_update_service_letter_contact_returns_200_when_no_default(client, sample_service): - original_reply_to = create_letter_contact(service=sample_service, contact_block="Aberdeen, AB23 1XH") - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": False}) - response = client.post( - "/service/{}/letter-contact/{}".format(sample_service.id, original_reply_to.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - assert response.status_code == 200 - - -def test_update_service_letter_contact_returns_404_when_invalid_service_id(client, notify_db, notify_db_session): - response = client.post( - "/service/{}/letter-contact/{}".format(uuid.uuid4(), uuid.uuid4()), - data={}, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 404 - result = json.loads(response.get_data(as_text=True)) - assert result["result"] == "error" - assert result["message"] == "No result found" - - -def test_delete_service_letter_contact_can_archive_letter_contact(admin_request, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Edinburgh, ED1 1AA") - letter_contact = create_letter_contact(service=service, contact_block="Swansea, SN1 3CC", is_default=False) - - admin_request.post( - "service.delete_service_letter_contact", - service_id=service.id, - letter_contact_id=letter_contact.id, - ) - - assert letter_contact.archived is True - - -def test_delete_service_letter_contact_returns_200_if_archiving_template_default(admin_request, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Edinburgh, ED1 1AA") - letter_contact = create_letter_contact(service=service, contact_block="Swansea, SN1 3CC", is_default=False) - create_template(service=service, template_type="letter", reply_to=letter_contact.id) - - response = admin_request.post( - "service.delete_service_letter_contact", - service_id=service.id, - letter_contact_id=letter_contact.id, - _expected_status=200, - ) - assert response["data"]["archived"] is True - - def test_add_service_sms_sender_can_add_multiple_senders(client, notify_db_session): service = create_service() data = { @@ -3480,91 +3206,6 @@ def test_cancel_notification_for_service_raises_invalid_request_when_notificatio assert response["result"] == "error" -@pytest.mark.parametrize( - "notification_status", - [ - "cancelled", - "sending", - "sent", - "delivered", - "pending", - "failed", - "technical-failure", - "temporary-failure", - "permanent-failure", - "validation-failed", - "virus-scan-failed", - "returned-letter", - ], -) -@freeze_time("2018-07-07 12:00:00") -def test_cancel_notification_for_service_raises_invalid_request_when_letter_is_in_wrong_state_to_be_cancelled( - admin_request, - sample_letter_notification, - notification_status, -): - sample_letter_notification.status = notification_status - - response = admin_request.post( - "service.cancel_notification_for_service", - 
service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - _expected_status=400, - ) - assert response["message"] == "It’s too late to cancel this letter. Printing started today at 5.30pm" - assert response["result"] == "error" - - -@pytest.mark.parametrize("notification_status", ["created", "pending-virus-check"]) -@freeze_time("2018-07-07 16:00:00") -def test_cancel_notification_for_service_updates_letter_if_letter_is_in_cancellable_state( - admin_request, - sample_letter_notification, - notification_status, -): - sample_letter_notification.status = notification_status - sample_letter_notification.created_at = datetime.now() - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - ) - assert response["status"] == "cancelled" - - -@freeze_time("2017-12-12 17:30:00") -def test_cancel_notification_for_service_raises_error_if_its_too_late_to_cancel( - admin_request, - sample_letter_notification, -): - sample_letter_notification.created_at = datetime(2017, 12, 11, 17, 0) - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - _expected_status=400, - ) - assert response["message"] == "It’s too late to cancel this letter. Printing started on 11 December at 5.30pm" - assert response["result"] == "error" - - -@freeze_time("2018-7-7 16:00:00") -def test_cancel_notification_for_service_updates_letter_if_still_time_to_cancel( - admin_request, - sample_letter_notification, -): - sample_letter_notification.created_at = datetime(2018, 7, 7, 10, 0) - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - ) - assert response["status"] == "cancelled" - - def test_get_monthly_notification_data_by_service(mocker, admin_request): dao_mock = mocker.patch( "app.service.rest.fact_notification_status_dao.fetch_monthly_notification_statuses_per_service", diff --git a/tests/app/service/test_send_pdf_letter_notification.py b/tests/app/service/test_send_pdf_letter_notification.py deleted file mode 100644 index b236c83cab..0000000000 --- a/tests/app/service/test_send_pdf_letter_notification.py +++ /dev/null @@ -1,111 +0,0 @@ -import uuid - -import pytest -from freezegun import freeze_time -from notifications_utils.s3 import S3ObjectNotFound - -from app.dao.notifications_dao import get_notification_by_id -from app.models import EMAIL_TYPE, LETTER_TYPE, UPLOAD_LETTERS -from app.service.send_notification import send_pdf_letter_notification -from app.v2.errors import BadRequestError, TooManyRequestsError -from tests.app.db import create_service - - -@pytest.mark.parametrize( - "permissions", - [ - [EMAIL_TYPE], - [LETTER_TYPE], - [UPLOAD_LETTERS], - ], -) -def test_send_pdf_letter_notification_raises_error_if_service_does_not_have_permission( - notify_db_session, - fake_uuid, - permissions, -): - service = create_service(service_permissions=permissions) - post_data = {"filename": "valid.pdf", "created_by": fake_uuid, "file_id": fake_uuid} - - with pytest.raises(BadRequestError): - send_pdf_letter_notification(service.id, post_data) - - -def test_send_pdf_letter_notification_raises_error_if_service_is_over_daily_message_limit( - mocker, - sample_service_full_permissions, - fake_uuid, -): - mocker.patch( - 
"app.service.send_notification.check_service_over_daily_message_limit", - side_effect=TooManyRequestsError(10), - ) - post_data = {"filename": "valid.pdf", "created_by": fake_uuid, "file_id": fake_uuid} - - with pytest.raises(TooManyRequestsError): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -def test_send_pdf_letter_notification_validates_created_by(sample_service_full_permissions, fake_uuid, sample_user): - post_data = { - "filename": "valid.pdf", - "created_by": sample_user.id, - "file_id": fake_uuid, - } - - with pytest.raises(BadRequestError): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -def test_send_pdf_letter_notification_raises_error_when_pdf_is_not_in_transient_letter_bucket( - mocker, - sample_service_full_permissions, - fake_uuid, - notify_user, -): - user = sample_service_full_permissions.users[0] - post_data = {"filename": "valid.pdf", "created_by": user.id, "file_id": fake_uuid} - mocker.patch( - "app.service.send_notification.utils_s3download", - side_effect=S3ObjectNotFound({}, ""), - ) - - with pytest.raises(S3ObjectNotFound): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -@freeze_time("2019-08-02 11:00:00") -def test_send_pdf_letter_notification_creates_notification_and_moves_letter( - mocker, - sample_service_full_permissions, - notify_user, -): - user = sample_service_full_permissions.users[0] - filename = "valid.pdf" - file_id = uuid.uuid4() - post_data = {"filename": filename, "created_by": user.id, "file_id": file_id} - - mocker.patch("app.service.send_notification.utils_s3download") - mocker.patch("app.service.send_notification.get_page_count", return_value=1) - s3_mock = mocker.patch("app.service.send_notification.move_uploaded_pdf_to_letters_bucket") - - result = send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - notification = get_notification_by_id(file_id) - - assert notification.id == file_id - assert notification.api_key_id is None - assert notification.client_reference == filename - assert notification.created_by_id == user.id - assert notification.postage == "second" - assert notification.notification_type == LETTER_TYPE - assert notification.billable_units == 1 - assert notification.to == filename - assert notification.service_id == sample_service_full_permissions.id - - assert result == {"id": str(notification.id)} - - s3_mock.assert_called_once_with( - "service-{}/{}.pdf".format(sample_service_full_permissions.id, file_id), - "2019-08-02/NOTIFY.{}.D.2.C.C.20190802110000.PDF".format(notification.reference), - ) From 4ad86fd5ff1c5e4c8345381dbb0aa656b912828e Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 12 Jun 2024 12:28:45 -0300 Subject: [PATCH 15/15] fix(deps): update dependency gunicorn to v22 [security] (#2156) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- poetry.lock | 15 ++++++++------- pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8209d2ed6f..a8d96b5928 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1632,22 +1632,23 @@ test = ["objgraph", "psutil"] [[package]] name = "gunicorn" -version = "20.1.0" +version = "22.0.0" description = "WSGI HTTP Server for UNIX" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "gunicorn-20.1.0-py3-none-any.whl", hash = 
"sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, - {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, + {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, + {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, ] [package.dependencies] -setuptools = ">=3.0" +packaging = "*" [package.extras] -eventlet = ["eventlet (>=0.24.1)"] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] gevent = ["gevent (>=1.4.0)"] setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] tornado = ["tornado (>=0.2)"] [[package]] @@ -4212,4 +4213,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "62653f4a581d32ac1678c8454f1100320096e8166aca1599cffd6fd3f72cfb4b" +content-hash = "6acef3767b11b980f9b6b4172211b8bec7bd9eb4a65cd8c341bd61b9b6913336" diff --git a/pyproject.toml b/pyproject.toml index 9844f1c756..b13b7db3d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ Flask-SQLAlchemy = { git = "https://github.com/pallets-eco/flask-sqlalchemy.git" Flask = "2.3.3" click-datetime = "0.2" gevent = "23.9.1" -gunicorn = "20.1.0" +gunicorn = "22.0.0" iso8601 = "2.0.0" jsonschema = "3.2.0" marshmallow-sqlalchemy = "0.29.0"