diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 328816279b..19b03edfa9 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -20,7 +20,7 @@ services: - db db: - image: postgres:11.20-bullseye@sha256:98fac4e8dc6fb58a75f2be563e876842f53db5baadb0d98abdd3205a20f6e6eb + image: postgres:11.22-bullseye@sha256:c886a3236b3d11abc302e64309186c90a69b49e53ccff23fd8c8b057b5b4bce9 volumes: - ./initdb:/docker-entrypoint-initdb.d restart: always @@ -38,7 +38,7 @@ services: - "5432:5432" redis: - image: redis:6.2@sha256:9e75c88539241ad7f61bc9c39ea4913b354064b8a75ca5fc40e1cef41b645bc0 + image: redis:6.2@sha256:d4948d011cc38e94f0aafb8f9a60309bd93034e07d10e0767af534512cf012a9 restart: always command: redis-server --port 6380 ports: diff --git a/.devcontainer/scripts/notify-dev-entrypoint.sh b/.devcontainer/scripts/notify-dev-entrypoint.sh index e2f99ea29a..a2d1fa10de 100755 --- a/.devcontainer/scripts/notify-dev-entrypoint.sh +++ b/.devcontainer/scripts/notify-dev-entrypoint.sh @@ -25,6 +25,7 @@ echo -e "complete -F __start_kubectl k" >> ~/.zshrc # Smoke test # requires adding files .env_staging and .env_prod to the root of the project +echo -e "alias smoke-local='cd /workspace && cp .env_smoke_local tests_smoke/.env && poetry run make smoke-test-local'" >> ~/.zshrc echo -e "alias smoke-staging='cd /workspace && cp .env_smoke_staging tests_smoke/.env && poetry run make smoke-test'" >> ~/.zshrc echo -e "alias smoke-prod='cd /workspace && cp .env_smoke_prod tests_smoke/.env && poetry run make smoke-test'" >> ~/.zshrc diff --git a/.env.example b/.env.example index 8e60a9b5ae..6557dd4a88 100644 --- a/.env.example +++ b/.env.example @@ -19,3 +19,7 @@ AWS_PINPOINT_REGION=us-west-2 AWS_EMF_ENVIRONMENT=local CONTACT_FORM_EMAIL_ADDRESS = "" + +AWS_PINPOINT_SC_POOL_ID= +AWS_PINPOINT_SC_TEMPLATE_IDS= +AWS_PINPOINT_DEFAULT_POOL_ID= diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 7976f87fde..1027577cb7 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -27,15 +27,15 @@ jobs: uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Initialize CodeQL - uses: github/codeql-action/init@2f93e4319b2f04a2efc38fa7f78bd681bc3f7b2f # v2.23.2 + uses: github/codeql-action/init@ffd3158cb9024ebd018dbf20756f28befbd168c7 # v2.24.10 with: languages: ${{ matrix.language }} queries: +security-and-quality - name: Autobuild - uses: github/codeql-action/autobuild@2f93e4319b2f04a2efc38fa7f78bd681bc3f7b2f # v2.23.2 + uses: github/codeql-action/autobuild@ffd3158cb9024ebd018dbf20756f28befbd168c7 # v2.24.10 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@2f93e4319b2f04a2efc38fa7f78bd681bc3f7b2f # v2.23.2 + uses: github/codeql-action/analyze@ffd3158cb9024ebd018dbf20756f28befbd168c7 # v2.24.10 with: category: "/language:${{ matrix.language }}" diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml index e4c5312d51..5ff8c67f88 100644 --- a/.github/workflows/docker.yaml +++ b/.github/workflows/docker.yaml @@ -9,6 +9,7 @@ env: DOCKER_ORG: public.ecr.aws/v6b8u5o6 DOCKER_SLUG: public.ecr.aws/v6b8u5o6/notify-api KUBECTL_VERSION: '1.23.6' + WORKFLOW_PAT: ${{ secrets.WORKFLOW_GITHUB_PAT }} permissions: id-token: write # This is required for requesting the OIDC JWT @@ -26,13 +27,6 @@ jobs: unzip -q awscliv2.zip sudo ./aws/install --update aws --version - - name: Install kubectl - run: | - curl -LO 
https://storage.googleapis.com/kubernetes-release/release/v$KUBECTL_VERSION/bin/linux/amd64/kubectl - chmod +x ./kubectl - sudo mv ./kubectl /usr/local/bin/kubectl - kubectl version --client - mkdir -p $HOME/.kube - name: Configure credentials to CDS public ECR using OIDC uses: aws-actions/configure-aws-credentials@master @@ -40,7 +34,7 @@ jobs: role-to-assume: arn:aws:iam::283582579564:role/notification-api-apply role-session-name: NotifyApiGitHubActions aws-region: "us-east-1" - + - name: Login to ECR id: login-ecr uses: aws-actions/amazon-ecr-login@5a88a04c91d5c6f97aae0d9be790e64d9b1d47b7 # v1.7.1 @@ -56,43 +50,14 @@ jobs: -t $DOCKER_SLUG:${GITHUB_SHA::7} \ -t $DOCKER_SLUG:latest \ -f ci/Dockerfile . + - name: Publish run: | docker push $DOCKER_SLUG:latest && docker push $DOCKER_SLUG:${GITHUB_SHA::7} - - name: Configure credentials to Notify account using OIDC - uses: aws-actions/configure-aws-credentials@master - with: - role-to-assume: arn:aws:iam::239043911459:role/notification-api-apply - role-session-name: NotifyApiGitHubActions - aws-region: "ca-central-1" - - - name: Get Kubernetes configuration - run: | - aws eks --region $AWS_REGION update-kubeconfig --name notification-canada-ca-staging-eks-cluster --kubeconfig $HOME/.kube/config - - name: Update images in staging + - name: Rollout in Kubernetes run: | - kubectl set image deployment.apps/api api=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-beat celery-beat=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-sms celery-sms=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-primary celery-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-scalable celery-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-sms-send-primary celery-sms-send-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-sms-send-scalable celery-sms-send-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-email-send-primary celery-email-send-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-email-send-scalable celery-email-send-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - - - name: Restart deployments in staging - run: | - kubectl rollout restart deployment/api -n notification-canada-ca - kubectl rollout restart deployment/celery-beat -n notification-canada-ca - kubectl rollout restart deployment/celery-sms -n notification-canada-ca - kubectl rollout restart deployment/celery-primary -n notification-canada-ca - kubectl rollout restart deployment/celery-scalable -n notification-canada-ca - kubectl rollout restart deployment/celery-sms-send-primary -n notification-canada-ca - kubectl rollout restart deployment/celery-sms-send-scalable -n notification-canada-ca - kubectl rollout restart deployment/celery-email-send-primary -n notification-canada-ca - kubectl rollout restart deployment/celery-email-send-scalable -n notification-canada-ca + 
./scripts/callManifestsRollout.sh ${GITHUB_SHA::7} - name: my-app-install token id: notify-pr-bot @@ -118,3 +83,4 @@ jobs: run: | json="{'text':' CI is failing in !'}" curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_WEBHOOK }} + diff --git a/.github/workflows/export_github_data.yml b/.github/workflows/export_github_data.yml index d12fdc1360..51ccbcb18b 100644 --- a/.github/workflows/export_github_data.yml +++ b/.github/workflows/export_github_data.yml @@ -14,7 +14,7 @@ jobs: DNS_PROXY_FORWARDTOSENTINEL: "true" DNS_PROXY_LOGANALYTICSWORKSPACEID: ${{ secrets.LOG_ANALYTICS_WORKSPACE_ID }} DNS_PROXY_LOGANALYTICSSHAREDKEY: ${{ secrets.LOG_ANALYTICS_WORKSPACE_KEY }} - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - name: Export Data uses: cds-snc/github-repository-metadata-exporter@main with: diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml index f612283a7c..dd97d6ea44 100644 --- a/.github/workflows/ossf-scorecard.yml +++ b/.github/workflows/ossf-scorecard.yml @@ -20,12 +20,12 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 with: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@0ae0fb3a2ca18a43d6dea9c07cfb9bd01d17eae1 + uses: ossf/scorecard-action@7699f539c2b9ff754039f0e173fdf1a4e4a1e143 with: results_file: ossf-results.json results_format: json diff --git a/.github/workflows/s3-backup.yml b/.github/workflows/s3-backup.yml index eb41d4c82e..b19055191c 100644 --- a/.github/workflows/s3-backup.yml +++ b/.github/workflows/s3-backup.yml @@ -10,12 +10,12 @@ jobs: steps: - name: Checkout - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 with: fetch-depth: 0 # retrieve all history - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1 + uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 with: aws-access-key-id: ${{ secrets.AWS_S3_BACKUP_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_S3_BACKUP_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 8d5c023e27..4f9a621187 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -6,7 +6,7 @@ jobs: runs-on: ubuntu-latest services: postgres: - image: postgres:11.20-bullseye@sha256:4e4b23580ada59c9ec5a712bdff9f91b0e6a7898d9ea954306b953c426727cef + image: postgres:11.22-bullseye@sha256:c886a3236b3d11abc302e64309186c90a69b49e53ccff23fd8c8b057b5b4bce9 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres @@ -67,7 +67,7 @@ jobs: run: | cp -f .env.example .env - name: Checks for new endpoints against AWS WAF rules - uses: cds-snc/notification-utils/.github/actions/waffles@06a40db6286f525fe3551e029418458d33342592 # 52.1.0 + uses: cds-snc/notification-utils/.github/actions/waffles@52.2.2 with: app-loc: '/github/workspace' app-libs: '/github/workspace/env/site-packages' diff --git a/.vscode/launch.json b/.vscode/launch.json index 628e4cc52f..711f43a9c6 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -43,7 +43,7 @@ "-l", "DEBUG", "-Q", - 
"database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low,send-throttled-sms-tasks,send-email-high,send-email-medium,send-email-low,send-email-tasks,service-callbacks,delivery-receipts", + "database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,service-callbacks-retry,send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low,send-throttled-sms-tasks,send-email-high,send-email-medium,send-email-low,send-email-tasks,service-callbacks,delivery-receipts", ] }, { diff --git a/Makefile b/Makefile index 3f98e79566..919458bf49 100644 --- a/Makefile +++ b/Makefile @@ -46,21 +46,25 @@ format: smoke-test: cd tests_smoke && poetry run python smoke_test.py +.PHONY: smoke-test-local +smoke-test-local: + cd tests_smoke && poetry run python smoke_test.py --local --nofiles + .PHONY: run run: ## Run the web app - flask run -p 6011 --host=0.0.0.0 + poetry run flask run -p 6011 --host=0.0.0.0 .PHONY: run-celery-local run-celery-local: ## Run the celery workers with all the queues - ./scripts/run_celery_local.sh + poetry run ./scripts/run_celery_local.sh .PHONY: run-celery-local-filtered run-celery-local-filtered: ## Run the celery workers with all queues but filter out common scheduled tasks - ./scripts/run_celery_local.sh 2>&1 >/dev/null | grep -iEv 'beat|in-flight-to-inbox|run-scheduled-jobs|check-job-status' + poetry run ./scripts/run_celery_local.sh 2>&1 >/dev/null | grep -iEv 'beat|in-flight-to-inbox|run-scheduled-jobs|check-job-status' .PHONY: run-celery-purge run-celery-purge: ## Purge the celery queues - ./scripts/run_celery_purge.sh + poetry run ./scripts/run_celery_purge.sh .PHONY: run-db run-db: ## psql to access dev database diff --git a/README.md b/README.md index d2c8a6bf01..e36ade9a6e 100644 --- a/README.md +++ b/README.md @@ -17,78 +17,7 @@ Contains: For any issues during the following instructions, make sure to review the **Frequent problems** section toward the end of the document. -### Local installation instruction - -#### On OS X: - -1. Install PyEnv with Homebrew. This will preserve your sanity. - -`brew install pyenv` - -2. Install Python 3.10.8 or whatever is the latest - -`pyenv install 3.10.8` - -3. If you expect no conflicts, set `3.10.8` as you default - -`pyenv global 3.10.8` - -4. Ensure it installed by running - -`python --version` - -if it did not, take a look here: https://github.com/pyenv/pyenv/issues/660 - -5. Install `poetry`: - -`pip install poetry==1.3.2` - -6. Restart your terminal and make your virtual environtment: - -`poetry env use $(which python)` - -8. Verify that the environment was created and activated by poetry - -`poetry env list` - -9. Install [Postgres.app](http://postgresapp.com/). - -10. Create the database for the application - -`createdb --user=postgres notification_api` - -11. Install the required environment variables via our LastPast Vault - -Within the team's *LastPass Vault*, you should find corresponding folders for this -project containing the `.env` content that you should copy in your project root folder. This -will grant the application necessary access to our internal infrastructure. 
- -If you don't have access to our *LastPass Vault* (as you evaluate our notification -platform for example), you will find a sane set of defaults exists in the `.env.example` -file. Copy that file to `.env` and customize it to your needs. - -12. Install all dependencies - -`poetry install` - -1. Generate the version file ?!? - -`make generate-version-file` - -14. Run all DB migrations - -`flask db upgrade` - -15. Run the service - -`make run` - -15a. To test - -`poetry install --with test` - -`make test` - +### Local installation instruction (Use Dev Containers) #### In a [VS Code devcontainer](https://code.visualstudio.com/docs/remote/containers-tutorial) 1. Install VS Code diff --git a/app/__init__.py b/app/__init__.py index 77a2a7d545..c3c144620e 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -28,6 +28,7 @@ PerformancePlatformClient, ) from app.clients.salesforce.salesforce_client import SalesforceClient +from app.clients.sms.aws_pinpoint import AwsPinpointClient from app.clients.sms.aws_sns import AwsSnsClient from app.dbsetup import RoutingSQLAlchemy from app.encryption import CryptoSigner @@ -45,6 +46,7 @@ notify_celery = NotifyCelery() aws_ses_client = AwsSesClient() aws_sns_client = AwsSnsClient() +aws_pinpoint_client = AwsPinpointClient() signer_notification = CryptoSigner() signer_personalisation = CryptoSigner() signer_complaint = CryptoSigner() @@ -107,6 +109,7 @@ def create_app(application, config=None): statsd_client.init_app(application) logging.init_app(application, statsd_client) aws_sns_client.init_app(application, statsd_client=statsd_client) + aws_pinpoint_client.init_app(application, statsd_client=statsd_client) aws_ses_client.init_app(application.config["AWS_REGION"], statsd_client=statsd_client) notify_celery.init_app(application) @@ -120,7 +123,7 @@ def create_app(application, config=None): performance_platform_client.init_app(application) document_download_client.init_app(application) - clients.init_app(sms_clients=[aws_sns_client], email_clients=[aws_ses_client]) + clients.init_app(sms_clients=[aws_sns_client, aws_pinpoint_client], email_clients=[aws_ses_client]) if application.config["FF_SALESFORCE_CONTACT"]: salesforce_client.init_app(application) diff --git a/app/api_key/rest.py b/app/api_key/rest.py index 8ad96ec1e8..1aa6b28fd9 100644 --- a/app/api_key/rest.py +++ b/app/api_key/rest.py @@ -121,8 +121,7 @@ def revoke_api_keys(): # Step 1 try: - # take last 36 chars of string so that it works even if the full key is provided. - api_key_token = api_key_data["token"][-36:] + api_key_token = api_key_data["token"] api_key = get_api_key_by_secret(api_key_token) except Exception: current_app.logger.error( diff --git a/app/authentication/auth.py b/app/authentication/auth.py index 144c89079f..5fe609a060 100644 --- a/app/authentication/auth.py +++ b/app/authentication/auth.py @@ -63,7 +63,7 @@ def get_auth_token(req): for el in AUTH_TYPES: scheme, auth_type, _ = el if auth_header.lower().startswith(scheme.lower()): - token = auth_header[len(scheme) + 1 :] + token = auth_header[len(scheme) + 1 :].strip() return auth_type, token raise AuthError( @@ -152,21 +152,12 @@ def requires_auth(): def _auth_by_api_key(auth_token): - # TODO: uncomment this when the grace period for the token prefix is over - # orig_token = auth_token - try: - # take last 36 chars of string so that it works even if the full key is provided. 
- auth_token = auth_token[-36:] api_key = get_api_key_by_secret(auth_token) - - # TODO: uncomment this when the grace period for the token prefix is over - # check for token prefix - # if current_app.config["API_KEY_PREFIX"] not in orig_token: - # raise AuthError("Invalid token: you must re-generate your API key to continue using GC Notify", 403, service_id=api_key.service.id, api_key_id=api_key.id) - except NoResultFound: raise AuthError("Invalid token: API key not found", 403) + except ValueError: + raise AuthError("Invalid token: Enter your full API key", 403) _auth_with_api_key(api_key, api_key.service) diff --git a/app/aws/mocks.py b/app/aws/mocks.py index 46c6f5fe10..99c7dad216 100644 --- a/app/aws/mocks.py +++ b/app/aws/mocks.py @@ -192,6 +192,106 @@ def sns_failed_callback(provider_response, reference=None, timestamp="2016-06-28 return _sns_callback(body) +# Note that 1467074434 = 2016-06-28 00:40:34.558 UTC +def pinpoint_successful_callback(reference=None, timestamp=1467074434, destination="+1XXX5550100"): + body = { + "eventType": "TEXT_SUCCESSFUL", + "eventVersion": "1.0", + "eventTimestamp": timestamp, + "isFinal": False, + "originationPhoneNumber": "+13655550100", + "destinationPhoneNumber": destination, + "isoCountryCode": "CA", + "mcc": "302", + "mnc": "610", + "carrierName": "Bell Cellular Inc. / Aliant Telecom", + "messageId": reference, + "messageRequestTimestamp": timestamp, + "messageEncoding": "GSM", + "messageType": "TRANSACTIONAL", + "messageStatus": "SUCCESSFUL", + "messageStatusDescription": "Message has been accepted by phone carrier", + "totalMessageParts": 1, + "totalMessagePrice": 0.00581, + "totalCarrierFee": 0.00767, + } + + return _pinpoint_callback(body) + + +def pinpoint_delivered_callback(reference=None, timestamp=1467074434, destination="+1XXX5550100"): + body = { + "eventType": "TEXT_DELIVERED", + "eventVersion": "1.0", + "eventTimestamp": timestamp, + "isFinal": True, + "originationPhoneNumber": "+13655550100", + "destinationPhoneNumber": destination, + "isoCountryCode": "CA", + "mcc": "302", + "mnc": "610", + "carrierName": "Bell Cellular Inc. 
/ Aliant Telecom", + "messageId": reference, + "messageRequestTimestamp": timestamp, + "messageEncoding": "GSM", + "messageType": "TRANSACTIONAL", + "messageStatus": "DELIVERED", + "messageStatusDescription": "Message has been accepted by phone", + "totalMessageParts": 1, + "totalMessagePrice": 0.00581, + "totalCarrierFee": 0.006, + } + + return _pinpoint_callback(body) + + +def pinpoint_shortcode_delivered_callback(reference=None, timestamp=1467074434, destination="+1XXX5550100"): + body = { + "eventType": "TEXT_SUCCESSFUL", + "eventVersion": "1.0", + "eventTimestamp": timestamp, + "isFinal": True, + "originationPhoneNumber": "555555", + "destinationPhoneNumber": destination, + "isoCountryCode": "CA", + "messageId": reference, + "messageRequestTimestamp": timestamp, + "messageEncoding": "GSM", + "messageType": "TRANSACTIONAL", + "messageStatus": "SUCCESSFUL", + "messageStatusDescription": "Message has been accepted by phone carrier", + "totalMessageParts": 1, + "totalMessagePrice": 0.02183, + "totalCarrierFee": 0.005, + } + + return _pinpoint_callback(body) + + +# Note that 1467074434 = 2016-06-28 00:40:34.558 UTC +def pinpoint_failed_callback(provider_response, reference=None, timestamp=1467074434, destination="+1XXX5550100"): + body = { + "eventType": "TEXT_CARRIER_UNREACHABLE", + "eventVersion": "1.0", + "eventTimestamp": timestamp, + "isFinal": True, + "originationPhoneNumber": "+13655550100", + "destinationPhoneNumber": destination, + "isoCountryCode": "CA", + "messageId": reference, + "messageRequestTimestamp": timestamp, + "messageEncoding": "GSM", + "messageType": "TRANSACTIONAL", + "messageStatus": "CARRIER_UNREACHABLE", + "messageStatusDescription": provider_response, + "totalMessageParts": 1, + "totalMessagePrice": 0.00581, + "totalCarrierFee": 0.006, + } + + return _pinpoint_callback(body) + + def _ses_bounce_callback(reference, bounce_type, bounce_subtype=None): ses_message_body = { "bounce": { @@ -267,3 +367,19 @@ def _sns_callback(body): "UnsubscribeUrl": "https://sns.ca-central-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=[REACTED]", "MessageAttributes": {}, } + + +def _pinpoint_callback(body): + return { + "Type": "Notification", + "MessageId": "8e83c020-1234-1234-1234-92a8ee9baa0a", + "TopicArn": "arn:aws:sns:ca-central-1:12341234:ses_notifications", + "Subject": None, + "Message": json.dumps(body), + "Timestamp": "2017-11-17T12:14:03.710Z", + "SignatureVersion": "1", + "Signature": "[REDACTED]", + "SigningCertUrl": "https://sns.ca-central-1.amazonaws.com/SimpleNotificationService-[REDACTED].pem", + "UnsubscribeUrl": "https://sns.ca-central-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=[REACTED]", + "MessageAttributes": {}, + } diff --git a/app/aws/s3.py b/app/aws/s3.py index 524eb876a9..9ebeb6d137 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -1,5 +1,6 @@ import uuid from datetime import datetime, timedelta +from typing import List import botocore import pytz @@ -7,6 +8,8 @@ from flask import current_app from notifications_utils.s3 import s3upload as utils_s3upload +from app.models import Job + FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv" @@ -60,8 +63,20 @@ def get_job_metadata_from_s3(service_id, job_id): return obj.get()["Metadata"] -def remove_job_from_s3(service_id, job_id): - return remove_s3_object(*get_job_location(service_id, job_id)) +def remove_jobs_from_s3(jobs: List[Job], batch_size=1000): + """ + Remove the files from S3 for the given jobs. + + Args: + jobs (List[Job]): The jobs whose files need to be removed from S3. 
+ batch_size (int, optional): The number of jobs to process in each boto call. Defaults to the AWS maximum of 1000. + """ + + bucket = resource("s3").Bucket(current_app.config["CSV_UPLOAD_BUCKET_NAME"]) + + for start in range(0, len(jobs), batch_size): + object_keys = [FILE_LOCATION_STRUCTURE.format(job.service_id, job.id) for job in jobs[start : start + batch_size]] + bucket.delete_objects(Delete={"Objects": [{"Key": key} for key in object_keys]}) def get_s3_bucket_objects(bucket_name, subfolder="", older_than=7, limit_days=2): diff --git a/app/celery/letters_pdf_tasks.py b/app/celery/letters_pdf_tasks.py index 059f0fc940..e40a10b366 100644 --- a/app/celery/letters_pdf_tasks.py +++ b/app/celery/letters_pdf_tasks.py @@ -1,392 +1,51 @@ -import base64 -import math -from base64 import urlsafe_b64encode -from datetime import datetime -from hashlib import sha512 -from json import JSONDecodeError -from uuid import UUID - -from botocore.exceptions import ClientError as BotoClientError -from flask import current_app -from notifications_utils.s3 import s3upload from notifications_utils.statsd_decorators import statsd -from PyPDF2.utils import PdfReadError -from requests import RequestException -from requests import post as requests_post from app import notify_celery -from app.aws import s3 -from app.config import QueueNames, TaskNames from app.cronitor import cronitor -from app.dao.notifications_dao import ( - dao_get_notification_by_reference, - dao_get_notifications_by_references, - dao_update_notification, - dao_update_notifications_by_reference, - get_notification_by_id, - update_notification_status_by_id, -) -from app.errors import VirusScanError -from app.letters.utils import ( - ScanErrorType, - copy_redaction_failed_pdf, - get_file_names_from_error_bucket, - get_folder_name, - get_page_count, - get_reference_from_filename, - move_error_pdf_to_scan_bucket, - move_failed_pdf, - move_scan_to_invalid_pdf_bucket, - upload_letter_pdf, -) -from app.models import ( - KEY_TYPE_TEST, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_VALIDATION_FAILED, - NOTIFICATION_VIRUS_SCAN_FAILED, -) -from celery.exceptions import MaxRetriesExceededError @notify_celery.task(bind=True, name="create-letters-pdf", max_retries=15, default_retry_delay=300) @statsd(namespace="tasks") def create_letters_pdf(self, notification_id): - try: - notification = get_notification_by_id(notification_id, _raise=True) - pdf_data, billable_units = get_letters_pdf( - notification.template, - contact_block=notification.reply_to_text, - filename=notification.service.letter_branding and notification.service.letter_branding.filename, - values=notification.personalisation, - ) - - upload_letter_pdf(notification, pdf_data) - - if notification.key_type != KEY_TYPE_TEST: - notification.billable_units = billable_units - dao_update_notification(notification) - - current_app.logger.info( - "Letter notification reference {reference}: billable units set to {billable_units}".format( - reference=str(notification.reference), billable_units=billable_units - ) - ) - - except (RequestException, BotoClientError): - try: - current_app.logger.exception("Letters PDF notification creation for id: {} failed".format(notification_id)) - self.retry(queue=QueueNames.RETRY) - except MaxRetriesExceededError: - current_app.logger.error( - "RETRY FAILED: task create_letters_pdf failed for notification {}".format(notification_id), - ) - update_notification_status_by_id(notification_id, "technical-failure") + pass def 
get_letters_pdf(template, contact_block, filename, values): - template_for_letter_print = { - "subject": template.subject, - "content": template.content, - } - - data = { - "letter_contact_block": contact_block, - "template": template_for_letter_print, - "values": values, - "filename": filename, - } - resp = requests_post( - "{}/print.pdf".format(current_app.config["TEMPLATE_PREVIEW_API_HOST"]), - json=data, - headers={"Authorization": "Token {}".format(current_app.config["TEMPLATE_PREVIEW_API_KEY"])}, - ) - resp.raise_for_status() - - pages_per_sheet = 2 - billable_units = math.ceil(int(resp.headers.get("X-pdf-page-count", 0)) / pages_per_sheet) - - return resp.content, billable_units + pass @notify_celery.task(name="collate-letter-pdfs-for-day") @cronitor("collate-letter-pdfs-for-day") def collate_letter_pdfs_for_day(date=None): - if not date: - # Using the truncated date is ok because UTC to BST does not make a difference to the date, - # since it is triggered mid afternoon. - date = datetime.utcnow().strftime("%Y-%m-%d") - - letter_pdfs = sorted( - s3.get_s3_bucket_objects(current_app.config["LETTERS_PDF_BUCKET_NAME"], subfolder=date), - key=lambda letter: letter["Key"], - ) - for i, letters in enumerate(group_letters(letter_pdfs)): - filenames = [letter["Key"] for letter in letters] - - hash = urlsafe_b64encode(sha512("".join(filenames).encode()).digest())[:20].decode() - # eg NOTIFY.2018-12-31.001.Wjrui5nAvObjPd-3GEL-.ZIP - dvla_filename = "NOTIFY.{date}.{num:03}.{hash}.ZIP".format(date=date, num=i + 1, hash=hash) - - current_app.logger.info( - "Calling task zip-and-send-letter-pdfs for {} pdfs to upload {} with total size {:,} bytes".format( - len(filenames), dvla_filename, sum(letter["Size"] for letter in letters) - ) - ) - notify_celery.send_task( - name=TaskNames.ZIP_AND_SEND_LETTER_PDFS, - kwargs={"filenames_to_zip": filenames, "upload_filename": dvla_filename}, - queue=QueueNames.PROCESS_FTP, - compression="zlib", - ) + pass def group_letters(letter_pdfs): - """ - Group letters in chunks of MAX_LETTER_PDF_ZIP_FILESIZE. Will add files to lists, never going over that size. - If a single file is (somehow) larger than MAX_LETTER_PDF_ZIP_FILESIZE that'll be in a list on it's own. - If there are no files, will just exit (rather than yielding an empty list). 
- """ - running_filesize = 0 - list_of_files = [] - for letter in letter_pdfs: - if letter["Key"].lower().endswith(".pdf") and letter_in_created_state(letter["Key"]): - if ( - running_filesize + letter["Size"] > current_app.config["MAX_LETTER_PDF_ZIP_FILESIZE"] - or len(list_of_files) >= current_app.config["MAX_LETTER_PDF_COUNT_PER_ZIP"] - ): - yield list_of_files - running_filesize = 0 - list_of_files = [] - - running_filesize += letter["Size"] - list_of_files.append(letter) - - if list_of_files: - yield list_of_files + pass def letter_in_created_state(filename): - # filename looks like '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF' - subfolder = filename.split("/")[0] - ref = get_reference_from_filename(filename) - notifications = dao_get_notifications_by_references([ref]) - if notifications: - if notifications[0].status == NOTIFICATION_CREATED: - return True - current_app.logger.info( - "Collating letters for {} but notification with reference {} already in {}".format( - subfolder, ref, notifications[0].status - ) - ) - return False + pass @notify_celery.task(bind=True, name="process-virus-scan-passed", max_retries=15, default_retry_delay=300) def process_virus_scan_passed(self, filename): - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - current_app.logger.info("notification id {} Virus scan passed: {}".format(notification.id, filename)) - - is_test_key = notification.key_type == KEY_TYPE_TEST - - scan_pdf_object = s3.get_s3_object(current_app.config["LETTERS_SCAN_BUCKET_NAME"], filename) - old_pdf = scan_pdf_object.get()["Body"].read() - - try: - billable_units = get_page_count(old_pdf) - except PdfReadError: - current_app.logger.exception(msg="Invalid PDF received for notification_id: {}".format(notification.id)) - _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object) - return - - sanitise_response = _sanitise_precompiled_pdf(self, notification, old_pdf) - if not sanitise_response: - new_pdf = None - else: - sanitise_response = sanitise_response.json() - try: - new_pdf = base64.b64decode(sanitise_response["file"].encode()) - except JSONDecodeError: - new_pdf = sanitise_response.content - - redaction_failed_message = sanitise_response.get("redaction_failed_message") - if redaction_failed_message and not is_test_key: - current_app.logger.info("{} for notification id {} ({})".format(redaction_failed_message, notification.id, filename)) - copy_redaction_failed_pdf(filename) - - # TODO: Remove this once CYSP update their template to not cross over the margins - if notification.service_id == UUID("fe44178f-3b45-4625-9f85-2264a36dd9ec"): # CYSP - # Check your state pension submit letters with good addresses and notify tags, so just use their supplied pdf - new_pdf = old_pdf - - if not new_pdf: - current_app.logger.info("Invalid precompiled pdf received {} ({})".format(notification.id, filename)) - _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object) - return - else: - current_app.logger.info("Validation was successful for precompiled pdf {} ({})".format(notification.id, filename)) - - current_app.logger.info("notification id {} ({}) sanitised and ready to send".format(notification.id, filename)) - - try: - _upload_pdf_to_test_or_live_pdf_bucket(new_pdf, filename, is_test_letter=is_test_key) - - update_letter_pdf_status( - reference=reference, - status=NOTIFICATION_DELIVERED if is_test_key else NOTIFICATION_CREATED, - billable_units=billable_units, - ) - 
scan_pdf_object.delete() - except BotoClientError: - current_app.logger.exception("Error uploading letter to live pdf bucket for notification: {}".format(notification.id)) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - - -def _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object): - try: - move_scan_to_invalid_pdf_bucket(filename) - scan_pdf_object.delete() - - update_letter_pdf_status( - reference=notification.reference, - status=NOTIFICATION_VALIDATION_FAILED, - billable_units=0, - ) - except BotoClientError: - current_app.logger.exception("Error when moving letter with id {} to invalid PDF bucket".format(notification.id)) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - - -def _upload_pdf_to_test_or_live_pdf_bucket(pdf_data, filename, is_test_letter): - target_bucket_config = "TEST_LETTERS_BUCKET_NAME" if is_test_letter else "LETTERS_PDF_BUCKET_NAME" - target_bucket_name = current_app.config[target_bucket_config] - target_filename = get_folder_name(datetime.utcnow(), is_test_letter) + filename - - s3upload( - filedata=pdf_data, - region=current_app.config["AWS_REGION"], - bucket_name=target_bucket_name, - file_location=target_filename, - ) - - -def _sanitise_precompiled_pdf(self, notification, precompiled_pdf): - try: - response = requests_post( - "{}/precompiled/sanitise".format(current_app.config["TEMPLATE_PREVIEW_API_HOST"]), - data=precompiled_pdf, - headers={ - "Authorization": "Token {}".format(current_app.config["TEMPLATE_PREVIEW_API_KEY"]), - "Service-ID": str(notification.service_id), - "Notification-ID": str(notification.id), - }, - ) - response.raise_for_status() - return response - except RequestException as ex: - if ex.response is not None and ex.response.status_code == 400: - message = "sanitise_precompiled_pdf validation error for notification: {}. ".format(notification.id) - if "message" in response.json(): - message += response.json()["message"] - - current_app.logger.info(message) - return None - - try: - current_app.logger.exception("sanitise_precompiled_pdf failed for notification: {}".format(notification.id)) - self.retry(queue=QueueNames.RETRY) - except MaxRetriesExceededError: - current_app.logger.error( - "RETRY FAILED: sanitise_precompiled_pdf failed for notification {}".format(notification.id), - ) - - notification.status = NOTIFICATION_TECHNICAL_FAILURE - dao_update_notification(notification) - raise + pass @notify_celery.task(name="process-virus-scan-failed") def process_virus_scan_failed(filename): - move_failed_pdf(filename, ScanErrorType.FAILURE) - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - updated_count = update_letter_pdf_status(reference, NOTIFICATION_VIRUS_SCAN_FAILED, billable_units=0) - - if updated_count != 1: - raise Exception( - "There should only be one letter notification for each reference. 
Found {} notifications".format(updated_count) - ) - - error = VirusScanError("notification id {} Virus scan failed: {}".format(notification.id, filename)) - current_app.logger.exception(error) - raise error + pass @notify_celery.task(name="process-virus-scan-error") def process_virus_scan_error(filename): - move_failed_pdf(filename, ScanErrorType.ERROR) - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - updated_count = update_letter_pdf_status(reference, NOTIFICATION_TECHNICAL_FAILURE, billable_units=0) - - if updated_count != 1: - raise Exception( - "There should only be one letter notification for each reference. Found {} notifications".format(updated_count) - ) - error = VirusScanError("notification id {} Virus scan error: {}".format(notification.id, filename)) - current_app.logger.exception(error) - raise error + pass def update_letter_pdf_status(reference, status, billable_units): - return dao_update_notifications_by_reference( - references=[reference], - update_dict={ - "status": status, - "billable_units": billable_units, - "updated_at": datetime.utcnow(), - }, - )[0] + pass def replay_letters_in_error(filename=None): - # This method can be used to replay letters that end up in the ERROR directory. - # We had an incident where clamAV was not processing the virus scan. - if filename: - move_error_pdf_to_scan_bucket(filename) - # call task to add the filename to anti virus queue - current_app.logger.info("Calling scan_file for: {}".format(filename)) - - if current_app.config["ANTIVIRUS_ENABLED"]: - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={"filename": filename}, - queue=QueueNames.ANTIVIRUS, - ) - else: - # stub out antivirus in dev - process_virus_scan_passed.apply_async( - kwargs={"filename": filename}, - queue=QueueNames.LETTERS, - ) - else: - error_files = get_file_names_from_error_bucket() - for item in error_files: - moved_file_name = item.key.split("/")[1] - current_app.logger.info("Calling scan_file for: {}".format(moved_file_name)) - move_error_pdf_to_scan_bucket(moved_file_name) - # call task to add the filename to anti virus queue - if current_app.config["ANTIVIRUS_ENABLED"]: - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={"filename": moved_file_name}, - queue=QueueNames.ANTIVIRUS, - ) - else: - # stub out antivirus in dev - process_virus_scan_passed.apply_async( - kwargs={"filename": moved_file_name}, - queue=QueueNames.LETTERS, - ) + pass diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index c69e32018a..4c3e5832d1 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -1,4 +1,5 @@ from datetime import datetime, timedelta +from typing import List import pytz from flask import current_app @@ -12,7 +13,7 @@ from app.config import QueueNames from app.cronitor import cronitor from app.dao.inbound_sms_dao import delete_inbound_sms_older_than_retention -from app.dao.jobs_dao import dao_archive_job, dao_get_jobs_older_than_data_retention +from app.dao.jobs_dao import dao_archive_jobs, dao_get_jobs_older_than_data_retention from app.dao.notifications_dao import ( dao_timeout_notifications, delete_notifications_older_than_retention_by_type, @@ -37,26 +38,37 @@ @notify_celery.task(name="remove_sms_email_jobs") @cronitor("remove_sms_email_jobs") @statsd(namespace="tasks") -def remove_sms_email_csv_files(): - _remove_csv_files([EMAIL_TYPE, SMS_TYPE]) +def remove_sms_email_jobs(): + """ + Remove csv files from s3 and archive email and sms 
jobs older than data retention period. + """ + + _archive_jobs([EMAIL_TYPE, SMS_TYPE]) @notify_celery.task(name="remove_letter_jobs") @cronitor("remove_letter_jobs") @statsd(namespace="tasks") -def remove_letter_csv_files(): - _remove_csv_files([LETTER_TYPE]) - - -def _remove_csv_files(job_types): - jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types) - current_app.logger.info("TEMP LOGGING: trying to remove {} jobs.".format(len(jobs))) - for job in jobs: - current_app.logger.info("TEMP LOGGING: trying to remove Job ID {} from s3.".format(job.id)) - s3.remove_job_from_s3(job.service_id, job.id) - current_app.logger.info("TEMP LOGGING: trying to archive Job ID {}".format(job.id)) - dao_archive_job(job) - current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) +def remove_letter_jobs(): + _archive_jobs([LETTER_TYPE]) + + +def _archive_jobs(job_types: List[str]): + """ + Remove csv files from s3 and archive jobs older than data retention period. + + Args: + job_types (List[str]): list of job types to remove csv files and archive jobs for + """ + + while True: + jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=100) + if len(jobs) == 0: + break + current_app.logger.info("Archiving {} jobs.".format(len(jobs))) + s3.remove_jobs_from_s3(jobs) + dao_archive_jobs(jobs) + current_app.logger.info(f"Jobs archived: {[job.id for job in jobs]}") @notify_celery.task(name="delete-sms-notifications") diff --git a/app/celery/process_pinpoint_receipts_tasks.py b/app/celery/process_pinpoint_receipts_tasks.py new file mode 100644 index 0000000000..d5bbb4d1bc --- /dev/null +++ b/app/celery/process_pinpoint_receipts_tasks.py @@ -0,0 +1,154 @@ +from datetime import datetime +from typing import Union + +from flask import current_app, json +from notifications_utils.statsd_decorators import statsd +from sqlalchemy.orm.exc import NoResultFound + +from app import notify_celery, statsd_client +from app.config import QueueNames +from app.dao import notifications_dao +from app.models import ( + NOTIFICATION_DELIVERED, + NOTIFICATION_PERMANENT_FAILURE, + NOTIFICATION_SENT, + NOTIFICATION_TECHNICAL_FAILURE, + NOTIFICATION_TEMPORARY_FAILURE, + PINPOINT_PROVIDER, +) +from app.notifications.callbacks import _check_and_queue_callback_task +from celery.exceptions import Retry + +# Pinpoint receipts are of the form: +# { +# "eventType": "TEXT_DELIVERED", +# "eventVersion": "1.0", +# "eventTimestamp": 1712944268877, +# "isFinal": true, +# "originationPhoneNumber": "+13655550100", +# "destinationPhoneNumber": "+16135550123", +# "isoCountryCode": "CA", +# "mcc": "302", +# "mnc": "610", +# "carrierName": "Bell Cellular Inc. 
/ Aliant Telecom", +# "messageId": "221bc70c-7ee6-4987-b1ba-9684ba25be20", +# "messageRequestTimestamp": 1712944267685, +# "messageEncoding": "GSM", +# "messageType": "TRANSACTIONAL", +# "messageStatus": "DELIVERED", +# "messageStatusDescription": "Message has been accepted by phone", +# "totalMessageParts": 1, +# "totalMessagePrice": 0.00581, +# "totalCarrierFee": 0.006 +# } + + +@notify_celery.task(bind=True, name="process-pinpoint-result", max_retries=5, default_retry_delay=300) +@statsd(namespace="tasks") +def process_pinpoint_results(self, response): + try: + receipt = json.loads(response["Message"]) + reference = receipt["messageId"] + status = receipt["messageStatus"] + provider_response = receipt["messageStatusDescription"] + isFinal = receipt["isFinal"] + + notification_status = determine_pinpoint_status(status, provider_response, isFinal) + + if notification_status == NOTIFICATION_SENT: + return # we don't want to update the status to sent if it's already sent + + if not notification_status: + current_app.logger.warning(f"unhandled provider response for reference {reference}, received '{provider_response}'") + notification_status = NOTIFICATION_TECHNICAL_FAILURE # revert to tech failure by default + + try: + notification = notifications_dao.dao_get_notification_by_reference(reference) + except NoResultFound: + try: + current_app.logger.warning( + f"RETRY {self.request.retries}: notification not found for Pinpoint reference {reference} (update to {notification_status}). " + f"Callback may have arrived before notification was persisted to the DB. Adding task to retry queue" + ) + self.retry(queue=QueueNames.RETRY) + except self.MaxRetriesExceededError: + current_app.logger.warning( + f"notification not found for Pinpoint reference: {reference} (update to {notification_status}). Giving up." + ) + return + if notification.sent_by != PINPOINT_PROVIDER: + current_app.logger.exception(f"Pinpoint callback handled notification {notification.id} not sent by Pinpoint") + return + + if notification.status != NOTIFICATION_SENT: + notifications_dao._duplicate_update_warning(notification, notification_status) + return + + notifications_dao._update_notification_status( + notification=notification, + status=notification_status, + provider_response=provider_response, + ) + + if notification_status != NOTIFICATION_DELIVERED: + current_app.logger.info( + ( + f"Pinpoint delivery failed: notification id {notification.id} and reference {reference} has error found. " + f"Provider response: {provider_response}" + ) + ) + else: + current_app.logger.info( + f"Pinpoint callback return status of {notification_status} for notification: {notification.id}" + ) + + statsd_client.incr(f"callback.pinpoint.{notification_status}") + + if notification.sent_at: + statsd_client.timing_with_dates("callback.pinpoint.elapsed-time", datetime.utcnow(), notification.sent_at) + + _check_and_queue_callback_task(notification) + + except Retry: + raise + + except Exception as e: + current_app.logger.exception(f"Error processing Pinpoint results: {str(e)}") + self.retry(queue=QueueNames.RETRY) + + +def determine_pinpoint_status(status: str, provider_response: str, isFinal: bool) -> Union[str, None]: + """Determine the notification status based on the SMS status and provider response. 
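+
+    Note: "DELIVERED", and "SUCCESSFUL" with isFinal=True (short code sends), both map to
+    NOTIFICATION_DELIVERED; "SUCCESSFUL" with isFinal=False means the carrier has accepted the
+    message but delivery is not yet confirmed, so it maps to NOTIFICATION_SENT.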
+ + Args: + status (str): message status from AWS + provider_response (str): detailed status from the SMS provider + isFinal (bool): whether this is the last update for this send + + Returns: + Union[str, None]: the notification status or None if the status is not handled + """ + + if status == "DELIVERED" or status == "SUCCESSFUL" and isFinal: + return NOTIFICATION_DELIVERED + elif status == "SUCCESSFUL": # carrier has accepted the message but it hasn't gone to the phone yet + return NOTIFICATION_SENT + + response_lower = provider_response.lower() + + if "blocked" in response_lower: + return NOTIFICATION_TECHNICAL_FAILURE + elif "invalid" in response_lower: + return NOTIFICATION_TECHNICAL_FAILURE + elif "is opted out" in response_lower: + return NOTIFICATION_PERMANENT_FAILURE + elif "unknown error" in response_lower: + return NOTIFICATION_TECHNICAL_FAILURE + elif "exceed max price" in response_lower: + return NOTIFICATION_TECHNICAL_FAILURE + elif "phone carrier is currently unreachable/unavailable" in response_lower: + return NOTIFICATION_TEMPORARY_FAILURE + elif "phone is currently unreachable/unavailable" in response_lower: + return NOTIFICATION_PERMANENT_FAILURE + else: + return None diff --git a/app/celery/provider_tasks.py b/app/celery/provider_tasks.py index 0539bd6ce1..4f21f9f2d9 100644 --- a/app/celery/provider_tasks.py +++ b/app/celery/provider_tasks.py @@ -42,10 +42,10 @@ def deliver_throttled_sms(self, notification_id): # Celery rate limits are per worker instance and not a global rate limit. # https://docs.celeryproject.org/en/stable/userguide/tasks.html#Task.rate_limit -# This task is dispatched through the `send-sms-tasks` queue. -# This queue is consumed by 6 Celery instances with 4 workers in production. -# The maximum throughput is therefore 6 instances * 4 workers = 24 tasks per second -# if we set rate_limit="1/s" on the Celery task +# We currently set rate_limit="1/s" on the Celery task and 4 workers per pod, and so a limit of 4 tasks per second per pod. +# The number of pods is controlled by the Kubernetes HPA and scales up and down with demand. +# Currently in production we have 3 celery-sms-send-primary pods, and up to 20 celery-sms-send-scalable pods +# This means we can send up to 92 messages per second. 
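+# That is, (3 primary + 20 scalable) pods x 4 workers per pod x rate_limit "1/s" = 92 tasks per second.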
@notify_celery.task( bind=True, name="deliver_sms", diff --git a/app/celery/research_mode_tasks.py b/app/celery/research_mode_tasks.py index cae829f09c..cf1c013f8c 100644 --- a/app/celery/research_mode_tasks.py +++ b/app/celery/research_mode_tasks.py @@ -6,6 +6,8 @@ from app import create_uuid, notify_celery from app.aws.mocks import ( + pinpoint_delivered_callback, + pinpoint_failed_callback, ses_hard_bounce_callback, ses_notification_callback, ses_soft_bounce_callback, @@ -14,9 +16,11 @@ sns_success_callback, ) from app.aws.s3 import file_exists +from app.celery.process_pinpoint_receipts_tasks import process_pinpoint_results from app.celery.process_ses_receipts_tasks import process_ses_results from app.celery.process_sns_receipts_tasks import process_sns_results from app.config import QueueNames +from app.models import PINPOINT_PROVIDER, SNS_PROVIDER temp_fail = "+15149301633" perm_fail = "+15149301632" @@ -29,8 +33,14 @@ def send_sms_response(provider, to, reference=None): reference = reference or str(create_uuid()) - body = aws_sns_callback(reference, to) - process_sns_results.apply_async([body], queue=QueueNames.RESEARCH_MODE) + if provider == SNS_PROVIDER: + body = aws_sns_callback(reference, to) + process_sns_results.apply_async([body], queue=QueueNames.RESEARCH_MODE) + elif provider == PINPOINT_PROVIDER: + body = aws_pinpoint_callback(reference, to) + process_pinpoint_results.apply_async([body], queue=QueueNames.RESEARCH_MODE) + else: + raise ValueError("Provider {} not supported".format(provider)) return reference @@ -64,6 +74,25 @@ def aws_sns_callback(notification_id, to): return sns_success_callback(notification_id, destination=to, timestamp=timestamp) +def aws_pinpoint_callback(notification_id, to): + now = datetime.now() + timestamp = now.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + using_test_perm_fail_number = to.strip().endswith(perm_fail) + using_test_temp_fail_number = to.strip().endswith(temp_fail) + + if using_test_perm_fail_number or using_test_temp_fail_number: + return pinpoint_failed_callback( + "Phone is currently unreachable/unavailable" + if using_test_perm_fail_number + else "Phone carrier is currently unreachable/unavailable", + notification_id, + destination=to, + timestamp=timestamp, + ) + else: + return pinpoint_delivered_callback(notification_id, destination=to, timestamp=timestamp) + + @notify_celery.task( bind=True, name="create-fake-letter-response-file", diff --git a/app/celery/service_callback_tasks.py b/app/celery/service_callback_tasks.py index d5b413922f..9296958f85 100644 --- a/app/celery/service_callback_tasks.py +++ b/app/celery/service_callback_tasks.py @@ -59,37 +59,32 @@ def send_complaint_to_service(self, complaint_data): def _send_data_to_service_callback_api(self, data, service_callback_url, token, function_name): notification_id = data["notification_id"] if "notification_id" in data else data["id"] try: + current_app.logger.info("{} sending {} to {}".format(function_name, notification_id, service_callback_url)) response = request( method="POST", url=service_callback_url, data=json.dumps(data), headers={ "Content-Type": "application/json", - "Authorization": "Bearer {}".format(token), + "Authorization": f"Bearer {token}", }, - timeout=60, + timeout=5, ) + current_app.logger.info( - "{} sending {} to {}, response {}".format( - function_name, - notification_id, - service_callback_url, - response.status_code, - ) + f"{function_name} sending {notification_id} to {service_callback_url}, response {response.status_code}" ) + 
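+        # raise_for_status() turns 4xx/5xx responses into HTTPError, which the retry handling below inspects.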
         response.raise_for_status()
     except RequestException as e:
         current_app.logger.warning(
-            "{} request failed for notification_id: {} and url: {}. exc: {}".format(
-                function_name, notification_id, service_callback_url, e
-            )
+            f"{function_name} request failed for notification_id: {notification_id} and url: {service_callback_url}. exc: {e}"
         )
-        if not isinstance(e, HTTPError) or e.response.status_code >= 500:
+        # Retry if the response status code is server-side or 429 (too many requests).
+        if not isinstance(e, HTTPError) or e.response.status_code >= 500 or e.response.status_code == 429:
             try:
-                self.retry(queue=QueueNames.RETRY)
+                self.retry(queue=QueueNames.CALLBACKS_RETRY)
             except self.MaxRetriesExceededError:
                 current_app.logger.warning(
-                    "Retry: {} has retried the max num of times for callback url {} and notification_id: {}".format(
-                        function_name, service_callback_url, notification_id
-                    )
+                    f"Retry: {function_name} has retried the max num of times for callback url {service_callback_url} and notification_id: {notification_id}"
                 )
diff --git a/app/celery/tasks.py b/app/celery/tasks.py
index df10c1db29..64f8dd669d 100644
--- a/app/celery/tasks.py
+++ b/app/celery/tasks.py
@@ -8,7 +8,10 @@
 from flask import current_app
 from itsdangerous import BadSignature
 from more_itertools import chunked
-from notifications_utils.recipients import RecipientCSV
+from notifications_utils.recipients import (
+    RecipientCSV,
+    try_validate_and_format_phone_number,
+)
 from notifications_utils.statsd_decorators import statsd
 from notifications_utils.template import SMSMessageTemplate, WithSubjectTemplate
 from notifications_utils.timezones import convert_utc_to_local_timezone
@@ -243,13 +246,18 @@
         sender_id = _notification.get("sender_id")  # type: ignore
         notification_id = _notification.get("id", create_uuid())
 
-        reply_to_text = ""  # type: ignore
-        if sender_id:
-            reply_to_text = dao_get_service_sms_senders_by_id(service_id, sender_id).sms_sender
-        elif template.service:
-            reply_to_text = template.get_reply_to_text()
+        if "reply_to_text" in _notification and _notification["reply_to_text"]:
+            reply_to_text = _notification["reply_to_text"]
         else:
-            reply_to_text = service.get_default_sms_sender()  # type: ignore
+            reply_to_text = ""  # type: ignore
+            if sender_id:
+                reply_to_text = try_validate_and_format_phone_number(
+                    dao_get_service_sms_senders_by_id(service_id, sender_id).sms_sender
+                )
+            elif template.service:
+                reply_to_text = template.get_reply_to_text()
+            else:
+                reply_to_text = service.get_default_sms_sender()  # type: ignore
 
         notification: VerifiedNotification = {
             **_notification,  # type: ignore
diff --git a/app/clients/freshdesk.py b/app/clients/freshdesk.py
index 8194cb29c4..fd0ecc978b 100644
--- a/app/clients/freshdesk.py
+++ b/app/clients/freshdesk.py
@@ -59,12 +59,22 @@
                 f"A new logo has been uploaded by {self.contact.name} ({self.contact.email_address}) for the following service:",
                 f"- Service id: {self.contact.service_id}",
                 f"- Service name: {self.contact.service_name}",
+                f"- Organisation id: {self.contact.organisation_id}",
+                f"- Organisation name: {self.contact.department_org_name}",
                 f"- Logo filename: {self.contact.branding_url}",
+                f"- Logo name: {self.contact.branding_logo_name}",
+                f"- Alt text english: {self.contact.alt_text_en}",
+                f"- Alt text french: {self.contact.alt_text_fr}",
                 "
", f"Un nouveau logo a été téléchargé par {self.contact.name} ({self.contact.email_address}) pour le service suivant :", f"- Identifiant du service : {self.contact.service_id}", f"- Nom du service : {self.contact.service_name}", + f"- Identifiant de l'organisation: {self.contact.organisation_id}", + f"- Nom de l'organisation: {self.contact.department_org_name}", f"- Nom du fichier du logo : {self.contact.branding_url}", + f"- Nom du logo : {self.contact.branding_logo_name}", + f"- Texte alternatif anglais : {self.contact.alt_text_en}", + f"- Texte alternatif français : {self.contact.alt_text_fr}", ] ) diff --git a/app/clients/salesforce/salesforce_account.py b/app/clients/salesforce/salesforce_account.py index 3002edb416..ec942b9259 100644 --- a/app/clients/salesforce/salesforce_account.py +++ b/app/clients/salesforce/salesforce_account.py @@ -32,7 +32,7 @@ def get_org_name_from_notes(organisation_notes: str, name_index: int = ORG_NOTES return organisation_notes -def get_account_id_from_name(session: Salesforce, account_name: str, generic_account_id: str) -> Optional[str]: +def get_account_id_from_name(session: Optional[Salesforce], account_name: str, generic_account_id: str) -> Optional[str]: """Returns the Account ID for the given Account Name. If no match is found, a generic Account not found ID is returned. diff --git a/app/clients/salesforce/salesforce_auth.py b/app/clients/salesforce/salesforce_auth.py index 181c6550b9..3fbcdbdf3b 100644 --- a/app/clients/salesforce/salesforce_auth.py +++ b/app/clients/salesforce/salesforce_auth.py @@ -1,3 +1,5 @@ +from typing import Optional + import requests from flask import current_app from simple_salesforce import Salesforce @@ -13,7 +15,7 @@ def send(self, *args, **kwargs): return super().send(*args, **kwargs) -def get_session(client_id: str, username: str, password: str, security_token: str, domain: str) -> Salesforce: +def get_session(client_id: str, username: str, password: str, security_token: str, domain: str) -> Optional[Salesforce]: """Return an authenticated Salesforce session Args: @@ -46,7 +48,7 @@ def get_session(client_id: str, username: str, password: str, security_token: st return session -def end_session(session: Salesforce): +def end_session(session: Optional[Salesforce]): """Logout of a Salesforce session Args: diff --git a/app/clients/salesforce/salesforce_client.py b/app/clients/salesforce/salesforce_client.py index 057cf76413..e320645df9 100644 --- a/app/clients/salesforce/salesforce_client.py +++ b/app/clients/salesforce/salesforce_client.py @@ -28,7 +28,7 @@ def init_app(self, app): # # Authentication # - def get_session(self) -> Salesforce: + def get_session(self) -> Optional[Salesforce]: """Returns an authenticated Salesforce session. Returns: @@ -36,7 +36,7 @@ def get_session(self) -> Salesforce: """ return salesforce_auth.get_session(self.client_id, self.username, self.password, self.security_token, self.domain) - def end_session(self, session: Salesforce) -> None: + def end_session(self, session: Optional[Salesforce]) -> None: """Revokes a Salesforce session. 
Args: @@ -73,7 +73,9 @@ def contact_update(self, user: User) -> None: salesforce_contact.update(session, user, user_updates) self.end_session(session) - def contact_update_account_id(self, session: Salesforce, service: Service, user: User) -> Tuple[Optional[str], Optional[str]]: + def contact_update_account_id( + self, session: Optional[Salesforce], service: Service, user: User + ) -> Tuple[Optional[str], Optional[str]]: """Updates the Account ID for the given Notify user's Salesforce Contact. The Salesforce Account ID and Contact ID are returned. diff --git a/app/clients/salesforce/salesforce_contact.py b/app/clients/salesforce/salesforce_contact.py index a8397eb445..a982c39c93 100644 --- a/app/clients/salesforce/salesforce_contact.py +++ b/app/clients/salesforce/salesforce_contact.py @@ -16,7 +16,7 @@ from app.models import User -def create(session: Salesforce, user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: +def create(session: Optional[Salesforce], user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: """Create a Salesforce Contact from the given Notify User Args: @@ -38,7 +38,7 @@ def create(session: Salesforce, user: User, field_updates: dict[str, Optional[st "Email": user.email_address, } field_values = field_default_values | field_updates - result = session.Contact.create( + result = session.Contact.create( # type: ignore field_values, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, ) @@ -50,7 +50,7 @@ def create(session: Salesforce, user: User, field_updates: dict[str, Optional[st return contact_id -def update(session: Salesforce, user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: +def update(session: Optional[Salesforce], user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: """Update a Contact's details. If the Contact does not exist, it is created. Args: @@ -67,8 +67,8 @@ def update(session: Salesforce, user: User, field_updates: dict[str, Optional[st # Existing contact, update the AccountID if contact: - result = session.Contact.update( - contact.get("Id"), field_updates, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"} + result = session.Contact.update( # type:ignore + str(contact.get("Id")), field_updates, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"} ) parse_result(result, f"Salesforce Contact update '{user.email_address}' with '{field_updates}'") contact_id = contact.get("Id") @@ -81,7 +81,7 @@ def update(session: Salesforce, user: User, field_updates: dict[str, Optional[st return contact_id -def get_contact_by_user_id(session: Salesforce, user_id: str) -> Optional[dict[str, str]]: +def get_contact_by_user_id(session: Optional[Salesforce], user_id: str) -> Optional[dict[str, str]]: """Retrieve a Salesforce Contact by their Notify user ID. If they can't be found, `None` is returned. 
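Aside: the `Optional[Salesforce]` changes above all apply one guard pattern — a failed login now yields `session = None`, and each helper degrades gracefully instead of raising `AttributeError` deep inside a Celery task. A minimal sketch of that pattern; `lookup_contact_id` and its inline SOQL are hypothetical, not part of this diff (the real helpers route queries through `query_one` and sanitize parameters):

```python
from typing import Optional

from simple_salesforce import Salesforce


def lookup_contact_id(session: Optional[Salesforce], email: str) -> Optional[str]:
    """Hypothetical helper illustrating the None-session guard used above."""
    if session is None:
        # Authentication failed upstream; return None rather than raising.
        return None
    # Illustrative only: production code sanitizes SOQL parameters first.
    results = session.query(f"SELECT Id FROM Contact WHERE Email = '{email}' LIMIT 1")
    records = results.get("records") or []
    return records[0]["Id"] if records else None
```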
diff --git a/app/clients/salesforce/salesforce_engagement.py b/app/clients/salesforce/salesforce_engagement.py index 1637311fa3..3462ceae7b 100644 --- a/app/clients/salesforce/salesforce_engagement.py +++ b/app/clients/salesforce/salesforce_engagement.py @@ -1,7 +1,7 @@ from __future__ import annotations from datetime import datetime -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any, Dict, Optional from flask import current_app from simple_salesforce import Salesforce @@ -22,7 +22,11 @@ def create( - session: Salesforce, service: Service, field_updates: dict[str, str], account_id: Optional[str], contact_id: Optional[str] + session: Optional[Salesforce], + service: Service, + field_updates: dict[str, str], + account_id: Optional[str], + contact_id: Optional[str], ) -> Optional[str]: """Create a Salesforce Engagement for the given Notify service @@ -38,7 +42,7 @@ def create( """ engagement_id = None try: - if account_id and contact_id: + if account_id and contact_id and session: # Default Engagement values, which can be overridden by passing in field_updates field_default_values = { "Name": service.name, @@ -54,7 +58,7 @@ def create( "Product_to_Add__c": ENGAGEMENT_PRODUCT, } field_values = field_default_values | field_updates - result = session.Opportunity.create( + result = session.Opportunity.create( # type: ignore engagement_maxlengths(field_values), headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, ) @@ -63,7 +67,7 @@ def create( # Create the Product association if engagement_id: - result = session.OpportunityLineItem.create( + result = session.OpportunityLineItem.create( # type: ignore { "OpportunityId": engagement_id, "PricebookEntryId": current_app.config["SALESFORCE_ENGAGEMENT_STANDARD_PRICEBOOK_ID"], @@ -76,7 +80,7 @@ def create( parse_result(result, f"Salesforce Engagement OpportunityLineItem create for service ID {service.id}") else: current_app.logger.error( - f"SF_ERR Salesforce Engagement create failed: missing Account ID '{account_id}' or Contact ID '{contact_id}' for service ID {service.id}" + f"SF_ERR Salesforce Engagement create failed: missing Account ID '{account_id}' or Contact ID '{contact_id}' for service ID {service.id} or the session is not available. '{session}'" ) except Exception as ex: current_app.logger.error(f"SF_ERR Salesforce Engagement create failed: {ex}") @@ -84,7 +88,11 @@ def create( def update( - session: Salesforce, service: Service, field_updates: dict[str, str], account_id: Optional[str], contact_id: Optional[str] + session: Optional[Salesforce], + service: Service, + field_updates: dict[str, str], + account_id: Optional[str], + contact_id: Optional[str], ) -> Optional[str]: """Update an Engagement. If the Engagement does not exist, it is created. @@ -104,8 +112,8 @@ def update( # Existing Engagement, update the stage name if engagement: - result = session.Opportunity.update( - engagement.get("Id"), + result = session.Opportunity.update( # type: ignore + str(engagement.get("Id")), engagement_maxlengths(field_updates), headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, ) @@ -120,7 +128,9 @@ def update( return engagement_id -def contact_role_add(session: Salesforce, service: Service, account_id: Optional[str], contact_id: Optional[str]) -> None: +def contact_role_add( + session: Optional[Salesforce], service: Service, account_id: Optional[str], contact_id: Optional[str] +) -> None: """Adds an Engagement ContactRole based on the provided Notify service and Contact. 
If the Engagement does not exist, it is created. @@ -136,7 +146,7 @@ def contact_role_add(session: Salesforce, service: Service, account_id: Optional try: engagement = get_engagement_by_service_id(session, str(service.id)) if engagement: - result = session.OpportunityContactRole.create( + result = session.OpportunityContactRole.create( # type: ignore {"ContactId": contact_id, "OpportunityId": engagement.get("Id")}, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, ) @@ -147,7 +157,9 @@ def contact_role_add(session: Salesforce, service: Service, account_id: Optional current_app.logger.error(f"SF_ERR Salesforce ContactRole add for {contact_id} with '{service.id}' failed: {ex}") -def contact_role_delete(session: Salesforce, service: Service, account_id: Optional[str], contact_id: Optional[str]) -> None: +def contact_role_delete( + session: Optional[Salesforce], service: Service, account_id: Optional[str], contact_id: Optional[str] +) -> None: """Deletes an Engagement ContactRole based on the provided Notify service and Salesforce Contact. If the Engagement does not exist, it is created. @@ -161,19 +173,19 @@ def contact_role_delete(session: Salesforce, service: Service, account_id: Optio None """ try: - result = {} + result: Dict[str, Any] = {} engagement = get_engagement_by_service_id(session, str(service.id)) engagement_id = engagement.get("Id") if engagement else create(session, service, {}, account_id, contact_id) engagement_contact_role = get_engagement_contact_role(session, engagement_id, contact_id) if engagement_contact_role: - result = session.OpportunityContactRole.delete(engagement_contact_role.get("Id")) + result = session.OpportunityContactRole.delete(engagement_contact_role.get("Id")) # type: ignore parse_result(result, f"Salesforce ContactRole delete for {contact_id} with '{service.id}'") except Exception as ex: current_app.logger.error(f"SF_ERR Salesforce ContactRole delete for {contact_id} with '{service.id}' failed: {ex}") -def get_engagement_by_service_id(session: Salesforce, service_id: str) -> Optional[dict[str, Any]]: +def get_engagement_by_service_id(session: Optional[Salesforce], service_id: str) -> Optional[dict[str, Any]]: """Retrieve a Salesforce Engagement by a Notify service ID Args: @@ -184,14 +196,14 @@ def get_engagement_by_service_id(session: Salesforce, service_id: str) -> Option Optional[dict[str, str]]: Salesforce Engagement details or None if can't be found """ result = None - if isinstance(service_id, str) and service_id.strip(): + if isinstance(service_id, str) and service_id.strip() and session is not None: query = f"SELECT Id, Name, ContactId, AccountId FROM Opportunity where CDS_Opportunity_Number__c = '{query_param_sanitize(service_id)}' LIMIT 1" result = query_one(session, query) return result def get_engagement_contact_role( - session: Salesforce, engagement_id: Optional[str], contact_id: Optional[str] + session: Optional[Salesforce], engagement_id: Optional[str], contact_id: Optional[str] ) -> Optional[dict[str, Any]]: """Retrieve a Salesforce Engagement ContactRole. 
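The engagement helpers above keep an upsert shape: `update` falls back to `create` when `get_engagement_by_service_id` finds no Opportunity for the service, and a `None` session short-circuits both paths. A hedged usage sketch — the wrapper function and stage-name value are illustrative, not part of this diff:

```python
from app.clients.salesforce import salesforce_engagement


def set_engagement_stage(session, service, account_id, contact_id, stage_name):
    # Upsert: update() creates the Opportunity (via create()) when no record
    # matches CDS_Opportunity_Number__c for this service, then applies the
    # field updates; with session=None the helpers log and return None.
    return salesforce_engagement.update(
        session,
        service,
        {"StageName": stage_name},  # placeholder field update
        account_id,
        contact_id,
    )
```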
diff --git a/app/clients/salesforce/salesforce_utils.py b/app/clients/salesforce/salesforce_utils.py
index 9b9f270547..0cb666112a 100644
--- a/app/clients/salesforce/salesforce_utils.py
+++ b/app/clients/salesforce/salesforce_utils.py
@@ -26,7 +26,7 @@ def get_name_parts(full_name: str) -> dict[str, str]:
     }
 
 
-def query_one(session: Salesforce, query: str) -> Optional[dict[str, Any]]:
+def query_one(session: Optional[Salesforce], query: str) -> Optional[dict[str, Any]]:
     """Execute an SOQL query that expects to return a single record.
 
     Args:
@@ -38,11 +38,14 @@ def query_one(session: Salesforce, query: str) -> Optional[dict[str, Any]]:
     """
     result = None
     try:
-        results = session.query(query)
-        if results.get("totalSize") == 1:
-            result = results.get("records")[0]
+        if session is not None:
+            results = session.query(query)
+            if results.get("totalSize") == 1:
+                result = results.get("records")[0]
+            else:
+                current_app.logger.warning(f"SF_WARN Salesforce no results found for query {query}")
         else:
-            current_app.logger.warn(f"SF_WARN Salesforce no results found for query {query}")
+            current_app.logger.error("SF_ERR Salesforce session is None")
     except Exception as ex:
         current_app.logger.error(f"SF_ERR Salesforce query {query} failed: {ex}")
     return result
diff --git a/app/clients/sms/aws_pinpoint.py b/app/clients/sms/aws_pinpoint.py
new file mode 100644
index 0000000000..bdb3ba7fa7
--- /dev/null
+++ b/app/clients/sms/aws_pinpoint.py
@@ -0,0 +1,60 @@
+from time import monotonic
+
+import boto3
+import phonenumbers
+
+from app.clients.sms import SmsClient
+
+
+class AwsPinpointClient(SmsClient):
+    """
+    AWS Pinpoint SMS client
+    """
+
+    def init_app(self, current_app, statsd_client, *args, **kwargs):
+        self._client = boto3.client("pinpoint-sms-voice-v2", region_name="ca-central-1")
+        super(AwsPinpointClient, self).__init__(*args, **kwargs)
+        self.current_app = current_app
+        self.name = "pinpoint"
+        self.statsd_client = statsd_client
+
+    def get_name(self):
+        return self.name
+
+    def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None):
+        messageType = "TRANSACTIONAL"
+        matched = False
+
+        if template_id is not None and str(template_id) in self.current_app.config["AWS_PINPOINT_SC_TEMPLATE_IDS"]:
+            pool_id = self.current_app.config["AWS_PINPOINT_SC_POOL_ID"]
+        else:
+            pool_id = self.current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"]
+
+        for match in phonenumbers.PhoneNumberMatcher(to, "US"):
+            matched = True
+            to = phonenumbers.format_number(match.number, phonenumbers.PhoneNumberFormat.E164)
+            destinationNumber = to
+
+            try:
+                start_time = monotonic()
+                response = self._client.send_text_message(
+                    DestinationPhoneNumber=destinationNumber,
+                    OriginationIdentity=pool_id,
+                    MessageBody=content,
+                    MessageType=messageType,
+                    ConfigurationSetName=self.current_app.config["AWS_PINPOINT_CONFIGURATION_SET_NAME"],
+                )
+            except Exception as e:
+                self.statsd_client.incr("clients.pinpoint.error")
+                raise e
+            finally:
+                elapsed_time = monotonic() - start_time
+                self.current_app.logger.info("AWS Pinpoint request finished in {}".format(elapsed_time))
+                self.statsd_client.timing("clients.pinpoint.request-time", elapsed_time)
+            self.statsd_client.incr("clients.pinpoint.success")
+            return response["MessageId"]
+
+        if not matched:
+            self.statsd_client.incr("clients.pinpoint.error")
+            self.current_app.logger.error("No valid numbers found in {}".format(to))
+            raise ValueError("No valid numbers found for SMS delivery")
diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py
index
cf6fe3e914..4847754d72 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -2,7 +2,6 @@ from time import monotonic import boto3 -import botocore import phonenumbers from notifications_utils.statsd_decorators import statsd @@ -27,7 +26,7 @@ def get_name(self): return self.name @statsd(namespace="clients.sns") - def send_sms(self, to, content, reference, multi=True, sender=None): + def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None): matched = False for match in phonenumbers.PhoneNumberMatcher(to, "US"): @@ -66,12 +65,9 @@ def send_sms(self, to, content, reference, multi=True, sender=None): try: start_time = monotonic() response = client.publish(PhoneNumber=to, Message=content, MessageAttributes=attributes) - except botocore.exceptions.ClientError as e: - self.statsd_client.incr("clients.sns.error") - raise str(e) except Exception as e: self.statsd_client.incr("clients.sns.error") - raise str(e) + raise e finally: elapsed_time = monotonic() - start_time self.current_app.logger.info("AWS SNS request finished in {}".format(elapsed_time)) diff --git a/app/config.py b/app/config.py index d08a88b854..b8b6521ad5 100644 --- a/app/config.py +++ b/app/config.py @@ -78,18 +78,11 @@ class QueueNames(object): # A queue for the tasks associated with the batch saving NOTIFY_CACHE = "notifiy-cache-tasks" - # For normal send of notifications. This is relatively normal volume and flushed - # pretty quickly. - SEND_NORMAL_QUEUE = "send-{}-tasks" # notification type to be filled in the queue name - # Queues for sending all SMS, except long dedicated numbers. SEND_SMS_HIGH = "send-sms-high" SEND_SMS_MEDIUM = "send-sms-medium" SEND_SMS_LOW = "send-sms-low" - # TODO: Delete this queue once we verify that it is not used anymore. - SEND_SMS = "send-sms-tasks" - # Primarily used for long dedicated numbers sent from us-west-2 upon which # we have a limit to send per second and hence, needs to be throttled. SEND_THROTTLED_SMS = "send-throttled-sms-tasks" @@ -99,9 +92,6 @@ class QueueNames(object): SEND_EMAIL_MEDIUM = "send-email-medium" SEND_EMAIL_LOW = "send-email-low" - # TODO: Delete this queue once we verify that it is not used anymore. - SEND_EMAIL = "send-email-tasks" - # The research mode queue for notifications that are tested by users trying # out Notify. RESEARCH_MODE = "research-mode-tasks" @@ -117,6 +107,7 @@ class QueueNames(object): PROCESS_FTP = "process-ftp-tasks" CREATE_LETTERS_PDF = "create-letters-pdf-tasks" CALLBACKS = "service-callbacks" + CALLBACKS_RETRY = "service-callbacks-retry" # Queue for letters, unused by CDS at this time as we don't use these. 
    LETTERS = "letter-tasks"
@@ -158,16 +149,15 @@ def all_queues():
         QueueNames.SEND_SMS_HIGH,
         QueueNames.SEND_SMS_MEDIUM,
         QueueNames.SEND_SMS_LOW,
-        QueueNames.SEND_SMS,
         QueueNames.SEND_THROTTLED_SMS,
         QueueNames.SEND_EMAIL_HIGH,
         QueueNames.SEND_EMAIL_MEDIUM,
         QueueNames.SEND_EMAIL_LOW,
-        QueueNames.SEND_EMAIL,
         QueueNames.RESEARCH_MODE,
         QueueNames.REPORTING,
         QueueNames.JOBS,
         QueueNames.RETRY,
+        QueueNames.CALLBACKS_RETRY,
         QueueNames.NOTIFY,
         # QueueNames.CREATE_LETTERS_PDF,
         QueueNames.CALLBACKS,
@@ -276,6 +266,10 @@ class Config(object):
     AWS_SES_ACCESS_KEY = os.getenv("AWS_SES_ACCESS_KEY")
     AWS_SES_SECRET_KEY = os.getenv("AWS_SES_SECRET_KEY")
     AWS_PINPOINT_REGION = os.getenv("AWS_PINPOINT_REGION", "us-west-2")
+    AWS_PINPOINT_SC_POOL_ID = os.getenv("AWS_PINPOINT_SC_POOL_ID", "")
+    AWS_PINPOINT_DEFAULT_POOL_ID = os.getenv("AWS_PINPOINT_DEFAULT_POOL_ID", "")
+    AWS_PINPOINT_CONFIGURATION_SET_NAME = os.getenv("AWS_PINPOINT_CONFIGURATION_SET_NAME", "pinpoint-configuration")
+    AWS_PINPOINT_SC_TEMPLATE_IDS = env.list("AWS_PINPOINT_SC_TEMPLATE_IDS", [])
     AWS_US_TOLL_FREE_NUMBER = os.getenv("AWS_US_TOLL_FREE_NUMBER")
     CSV_UPLOAD_BUCKET_NAME = os.getenv("CSV_UPLOAD_BUCKET_NAME", "notification-alpha-canada-ca-csv-upload")
     ASSET_DOMAIN = os.getenv("ASSET_DOMAIN", "assets.notification.canada.ca")
@@ -310,6 +304,7 @@ class Config(object):
     INVITATION_EMAIL_TEMPLATE_ID = "4f46df42-f795-4cc4-83bb-65ca312f49cc"
     SMS_CODE_TEMPLATE_ID = "36fb0730-6259-4da1-8a80-c8de22ad4246"
     EMAIL_2FA_TEMPLATE_ID = "299726d2-dba6-42b8-8209-30e1d66ea164"
+    EMAIL_MAGIC_LINK_TEMPLATE_ID = "6e97fd09-6da0-4cc8-829d-33cf5b818103"
     NEW_USER_EMAIL_VERIFICATION_TEMPLATE_ID = "ece42649-22a8-4d06-b87f-d52d5d3f0a27"
     PASSWORD_RESET_TEMPLATE_ID = "474e9242-823b-4f99-813d-ed392e7f1201"
     FORCED_PASSWORD_RESET_TEMPLATE_ID = "e9a65a6b-497b-42f2-8f43-1736e43e13b3"
@@ -367,6 +362,7 @@ class Config(object):
         "app.celery.scheduled_tasks",
         "app.celery.reporting_tasks",
         "app.celery.nightly_tasks",
+        "app.celery.process_pinpoint_receipts_tasks",
     )
     CELERYBEAT_SCHEDULE = {
         # app/celery/scheduled_tasks.py
diff --git a/app/dao/api_key_dao.py b/app/dao/api_key_dao.py
index f6a707bf76..1c9348d018 100644
--- a/app/dao/api_key_dao.py
+++ b/app/dao/api_key_dao.py
@@ -83,13 +83,30 @@ def update_compromised_api_key_info(service_id, api_key_id, compromised_info):
     db.session.add(api_key)
 
 
-def get_api_key_by_secret(secret):
-    signed_with_all_keys = signer_api_key.sign_with_all_keys(str(secret))
+def get_api_key_by_secret(secret, service_id=None):
+    # Check that the first part of the secret is the GC prefix
+    if current_app.config["API_KEY_PREFIX"] != secret[: len(current_app.config["API_KEY_PREFIX"])]:
+        raise ValueError()
+
+    # Check that the remaining part of the secret is the valid API key
+    token = secret[-36:]
+    signed_with_all_keys = signer_api_key.sign_with_all_keys(str(token))
     for signed_secret in signed_with_all_keys:
         try:
-            return db.on_reader().query(ApiKey).filter_by(_secret=signed_secret).options(joinedload("service")).one()
+            api_key = db.on_reader().query(ApiKey).filter_by(_secret=signed_secret).options(joinedload("service")).one()
         except NoResultFound:
-            pass
+            raise NoResultFound()
+
+    # Check that the middle portion of the secret is the valid service id
+    if api_key and api_key.service_id:
+        if len(secret) >= 79:
+            service_id_from_token = str(secret[-73:-37])
+            if str(api_key.service_id) != service_id_from_token:
+                raise ValueError()
+        else:
+            raise ValueError()
+    if api_key:
+        return api_key
     raise NoResultFound()
diff --git a/app/dao/email_branding_dao.py
b/app/dao/email_branding_dao.py index d8738e9200..1ed90ae1e6 100644 --- a/app/dao/email_branding_dao.py +++ b/app/dao/email_branding_dao.py @@ -3,7 +3,9 @@ from app.models import EmailBranding -def dao_get_email_branding_options(): +def dao_get_email_branding_options(filter_by_organisation_id=None): + if filter_by_organisation_id: + return EmailBranding.query.filter_by(organisation_id=filter_by_organisation_id).all() return EmailBranding.query.all() diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 3b945b15fc..28a8b1f15d 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -1,5 +1,6 @@ import uuid from datetime import datetime, timedelta +from typing import Iterable from flask import current_app from notifications_utils.letter_timings import ( @@ -71,9 +72,15 @@ def dao_get_job_by_id(job_id) -> Job: return Job.query.filter_by(id=job_id).one() -def dao_archive_job(job): - job.archived = True - db.session.add(job) +def dao_archive_jobs(jobs: Iterable[Job]): + """ + Archive the given jobs. + Args: + jobs (Iterable[Job]): The jobs to archive. + """ + for job in jobs: + job.archived = True + db.session.add(job) db.session.commit() @@ -129,7 +136,7 @@ def dao_update_job(job): db.session.commit() -def dao_get_jobs_older_than_data_retention(notification_types): +def dao_get_jobs_older_than_data_retention(notification_types, limit=None): flexible_data_retention = ServiceDataRetention.query.filter( ServiceDataRetention.notification_type.in_(notification_types) ).all() @@ -137,8 +144,7 @@ def dao_get_jobs_older_than_data_retention(notification_types): today = datetime.utcnow().date() for f in flexible_data_retention: end_date = today - timedelta(days=f.days_of_retention) - - jobs.extend( + query = ( Job.query.join(Template) .filter( func.coalesce(Job.scheduled_for, Job.created_at) < end_date, @@ -147,13 +153,15 @@ def dao_get_jobs_older_than_data_retention(notification_types): Job.service_id == f.service_id, ) .order_by(desc(Job.created_at)) - .all() ) + if limit: + query = query.limit(limit - len(jobs)) + jobs.extend(query.all()) end_date = today - timedelta(days=7) for notification_type in notification_types: services_with_data_retention = [x.service_id for x in flexible_data_retention if x.notification_type == notification_type] - jobs.extend( + query = ( Job.query.join(Template) .filter( func.coalesce(Job.scheduled_for, Job.created_at) < end_date, @@ -162,8 +170,10 @@ def dao_get_jobs_older_than_data_retention(notification_types): Job.service_id.notin_(services_with_data_retention), ) .order_by(desc(Job.created_at)) - .all() ) + if limit: + query = query.limit(limit - len(jobs)) + jobs.extend(query.all()) return jobs diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 9e20bca476..e88df7cbee 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -2,7 +2,6 @@ import string from datetime import datetime, timedelta -from boto.exception import BotoClientError from flask import current_app from itsdangerous import BadSignature from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES @@ -25,11 +24,9 @@ from werkzeug.datastructures import MultiDict from app import create_uuid, db, signer_personalisation -from app.aws.s3 import get_s3_bucket_objects, remove_s3_object from app.dao.dao_utils import transactional from app.dao.date_util import utc_midnight_n_days_ago from app.errors import InvalidRequest -from app.letters.utils import LETTERS_PDF_FILE_LOCATION_STRUCTURE from app.models import ( EMAIL_TYPE, 
KEY_TYPE_TEST, @@ -391,9 +388,6 @@ def delete_notifications_older_than_retention_by_type(notification_type, qry_lim convert_utc_to_local_timezone(datetime.utcnow()).date() ) - timedelta(days=f.days_of_retention) - if notification_type == LETTER_TYPE: - _delete_letters_from_s3(notification_type, f.service_id, days_of_retention, qry_limit) - insert_update_notification_history(notification_type, days_of_retention, f.service_id) current_app.logger.info("Deleting {} notifications for service id: {}".format(notification_type, f.service_id)) @@ -409,8 +403,6 @@ def delete_notifications_older_than_retention_by_type(notification_type, qry_lim for row in service_ids_to_purge: service_id = row._mapping["id"] - if notification_type == LETTER_TYPE: - _delete_letters_from_s3(notification_type, service_id, seven_days_ago, qry_limit) insert_update_notification_history(notification_type, seven_days_ago, service_id) deleted += _delete_notifications(notification_type, seven_days_ago, service_id, qry_limit) @@ -486,38 +478,6 @@ def insert_update_notification_history(notification_type, date_to_delete_from, s db.session.commit() -def _delete_letters_from_s3(notification_type, service_id, date_to_delete_from, query_limit): - letters_to_delete_from_s3 = ( - db.session.query(Notification) - .filter( - Notification.notification_type == notification_type, - Notification.created_at < date_to_delete_from, - Notification.service_id == service_id, - ) - .limit(query_limit) - .all() - ) - for letter in letters_to_delete_from_s3: - bucket_name = current_app.config["LETTERS_PDF_BUCKET_NAME"] - if letter.sent_at: - sent_at = str(letter.sent_at.date()) - prefix = LETTERS_PDF_FILE_LOCATION_STRUCTURE.format( - folder=sent_at + "/", - reference=letter.reference, - duplex="D", - letter_class="2", - colour="C", - crown="C" if letter.service.crown else "N", - date="", - ).upper()[:-5] - s3_objects = get_s3_bucket_objects(bucket_name=bucket_name, subfolder=prefix) - for s3_object in s3_objects: - try: - remove_s3_object(bucket_name, s3_object["Key"]) - except BotoClientError: - current_app.logger.exception("Could not delete S3 object with filename: {}".format(s3_object["Key"])) - - @statsd(namespace="dao") @transactional def dao_delete_notifications_by_id(notification_id): diff --git a/app/dao/organisation_dao.py b/app/dao/organisation_dao.py index 8c2ef63ddd..06ed25958d 100644 --- a/app/dao/organisation_dao.py +++ b/app/dao/organisation_dao.py @@ -2,7 +2,14 @@ from app import db from app.dao.dao_utils import transactional, version_class -from app.models import Domain, InvitedOrganisationUser, Organisation, Service, User +from app.models import ( + Domain, + EmailBranding, + InvitedOrganisationUser, + Organisation, + Service, + User, +) def dao_get_organisations(): @@ -55,6 +62,10 @@ def dao_update_organisation(organisation_id, **kwargs): domains = kwargs.pop("domains", None) num_updated = Organisation.query.filter_by(id=organisation_id).update(kwargs) + if "email_branding_id" in kwargs: + email_brand = EmailBranding.query.filter_by(id=kwargs["email_branding_id"]).one() + org = Organisation.query.get(organisation_id) + org.email_branding = email_brand if isinstance(domains, list): Domain.query.filter_by(organisation_id=organisation_id).delete() diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 1e36ace594..281bfa6cf9 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -313,8 +313,6 @@ def dao_create_service( if organisation: service.organisation_id = organisation.id service.organisation_type = 
organisation.organisation_type - if organisation.email_branding: - service.email_branding = organisation.email_branding if organisation.letter_branding and not service.letter_branding: service.letter_branding = organisation.letter_branding diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index 33590c7667..5ef24c0769 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -2,9 +2,10 @@ import os import re from datetime import datetime -from typing import Dict +from typing import Any, Dict, Optional from uuid import UUID +import phonenumbers from flask import current_app from notifications_utils.recipients import ( validate_and_format_email_address, @@ -46,7 +47,9 @@ NOTIFICATION_SENT, NOTIFICATION_TECHNICAL_FAILURE, NOTIFICATION_VIRUS_SCAN_FAILED, + PINPOINT_PROVIDER, SMS_TYPE, + SNS_PROVIDER, BounceRateStatus, Notification, Service, @@ -66,8 +69,10 @@ def send_sms_to_provider(notification): provider = provider_to_use( SMS_TYPE, notification.id, + notification.to, notification.international, notification.reply_to_text, + template_id=notification.template_id, ) template_dict = dao_get_template_by_id(notification.template_id, notification.template_version).__dict__ @@ -103,6 +108,7 @@ def send_sms_to_provider(notification): content=str(template), reference=str(notification.id), sender=notification.reply_to_text, + template_id=notification.template_id, ) except Exception as e: notification.billable_units = template.fragment_count @@ -334,10 +340,60 @@ def update_notification_to_sending(notification, provider): dao_update_notification(notification) -def provider_to_use(notification_type, notification_id, international=False, sender=None): - active_providers_in_order = [ - p for p in get_provider_details_by_notification_type(notification_type, international) if p.active - ] +def provider_to_use( + notification_type: str, + notification_id: UUID, + to: Optional[str] = None, + international: bool = False, + sender: Optional[str] = None, + template_id: Optional[UUID] = None, +) -> Any: + """ + Get the provider to use for sending the notification. + SMS that are being sent with a dedicated number or to a US number should not use Pinpoint. + + Args: + notification_type (str): SMS or EMAIL. + notification_id (UUID): id of notification. Just used for logging. + to (str, optional): recipient. Defaults to None. + international (bool, optional): Recipient is international. Defaults to False. + sender (str, optional): reply_to_text to use. Defaults to None. + template_id (str, optional): template_id to use. Defaults to None. + + Raises: + Exception: No active providers. + + Returns: + provider: Provider to use to send the notification. 
+ """ + + has_dedicated_number = sender is not None and sender.startswith("+1") + sending_to_us_number = False + if to is not None: + match = next(iter(phonenumbers.PhoneNumberMatcher(to, "US")), None) + if match and phonenumbers.region_code_for_number(match.number) == "US": + sending_to_us_number = True + + using_sc_pool_template = template_id is not None and str(template_id) in current_app.config["AWS_PINPOINT_SC_TEMPLATE_IDS"] + + do_not_use_pinpoint = ( + has_dedicated_number + or sending_to_us_number + or not current_app.config["AWS_PINPOINT_SC_POOL_ID"] + or ((not current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"]) and not using_sc_pool_template) + ) + if do_not_use_pinpoint: + active_providers_in_order = [ + p + for p in get_provider_details_by_notification_type(notification_type, international) + if p.active and p.identifier != PINPOINT_PROVIDER + ] + else: + active_providers_in_order = [ + p + for p in get_provider_details_by_notification_type(notification_type, international) + if p.active and p.identifier != SNS_PROVIDER + ] if not active_providers_in_order: current_app.logger.error("{} {} failed as no active providers".format(notification_type, notification_id)) @@ -353,12 +409,16 @@ def get_html_email_options(service: Service): "fip_banner_english": False, "fip_banner_french": True, "logo_with_background_colour": False, + "alt_text_en": None, + "alt_text_fr": None, } else: return { "fip_banner_english": True, "fip_banner_french": False, "logo_with_background_colour": False, + "alt_text_en": None, + "alt_text_fr": None, } logo_url = get_logo_url(service.email_branding.logo) if service.email_branding.logo else None @@ -371,6 +431,8 @@ def get_html_email_options(service: Service): "brand_logo": logo_url, "brand_text": service.email_branding.text, "brand_name": service.email_branding.name, + "alt_text_en": service.email_branding.alt_text_en, + "alt_text_fr": service.email_branding.alt_text_fr, } diff --git a/app/email_branding/email_branding_schema.py b/app/email_branding/email_branding_schema.py index b7070eafba..06366bb8c1 100644 --- a/app/email_branding/email_branding_schema.py +++ b/app/email_branding/email_branding_schema.py @@ -10,8 +10,10 @@ "text": {"type": ["string", "null"]}, "logo": {"type": ["string", "null"]}, "brand_type": {"enum": BRANDING_TYPES}, + "alt_text_en": {"type": "string"}, + "alt_text_fr": {"type": "string"}, }, - "required": ["name"], + "required": ["name", "alt_text_en", "alt_text_fr"], } post_update_email_branding_schema = { @@ -24,6 +26,8 @@ "text": {"type": ["string", "null"]}, "logo": {"type": ["string", "null"]}, "brand_type": {"enum": BRANDING_TYPES}, + "alt_text_en": {"type": "string"}, + "alt_text_fr": {"type": "string"}, }, "required": [], } diff --git a/app/email_branding/rest.py b/app/email_branding/rest.py index 3dc5086148..6ae95745be 100644 --- a/app/email_branding/rest.py +++ b/app/email_branding/rest.py @@ -20,7 +20,10 @@ @email_branding_blueprint.route("", methods=["GET"]) def get_email_branding_options(): - email_branding_options = [o.serialize() for o in dao_get_email_branding_options()] + filter_by_organisation_id = request.args.get("organisation_id", None) + email_branding_options = [ + o.serialize() for o in dao_get_email_branding_options(filter_by_organisation_id=filter_by_organisation_id) + ] return jsonify(email_branding=email_branding_options) diff --git a/app/job/rest.py b/app/job/rest.py index 950f1554c9..28bfeafd33 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -167,28 +167,40 @@ def create_job(service_id): ) if 
template.template_type == SMS_TYPE: + # set sender_id if missing + default_senders = [x for x in service.service_sms_senders if x.is_default] + default_sender_id = default_senders[0].id if default_senders else None + data["sender_id"] = data.get("sender_id", default_sender_id) + # calculate the number of simulated recipients - numberOfSimulated = sum( - simulated_recipient(i["phone_number"].data, template.template_type) for i in list(recipient_csv.get_rows()) - ) - mixedRecipients = numberOfSimulated > 0 and numberOfSimulated != len(list(recipient_csv.get_rows())) + numberOfSimulated = sum(simulated_recipient(i["phone_number"].data, template.template_type) for i in recipient_csv.rows) + mixedRecipients = numberOfSimulated > 0 and numberOfSimulated != len(recipient_csv) # if they have specified testing and NON-testing recipients, raise an error if mixedRecipients: raise InvalidRequest(message="Bulk sending to testing and non-testing numbers is not supported", status_code=400) - is_test_notification = len(list(recipient_csv.get_rows())) == numberOfSimulated + is_test_notification = len(recipient_csv) == numberOfSimulated if not is_test_notification: check_sms_daily_limit(service, len(recipient_csv)) increment_sms_daily_count_send_warnings_if_needed(service, len(recipient_csv)) elif template.template_type == EMAIL_TYPE: - check_email_daily_limit(service, len(list(recipient_csv.get_rows()))) + if "notification_count" in data: + notification_count = int(data["notification_count"]) + else: + current_app.logger.warning( + f"notification_count not in metadata for job {data['id']}, using len(recipient_csv) instead." + ) + notification_count = len(recipient_csv) + + check_email_daily_limit(service, notification_count) + scheduled_for = datetime.fromisoformat(data.get("scheduled_for")) if data.get("scheduled_for") else None if scheduled_for is None or not scheduled_for.date() > datetime.today().date(): - increment_email_daily_count_send_warnings_if_needed(service, len(list(recipient_csv.get_rows()))) + increment_email_daily_count_send_warnings_if_needed(service, notification_count) data.update({"template_version": template.version}) diff --git a/app/letters/rest.py b/app/letters/rest.py index 21c80f2432..87684e0b66 100644 --- a/app/letters/rest.py +++ b/app/letters/rest.py @@ -1,7 +1,5 @@ -from flask import Blueprint, jsonify, request +from flask import Blueprint -from app.letters.letter_schemas import letter_references -from app.schema_validation import validate from app.v2.errors import register_errors letter_job = Blueprint("letter-job", __name__) @@ -10,6 +8,4 @@ @letter_job.route("/letters/returned", methods=["POST"]) def create_process_returned_letters_job(): - references = validate(request.get_json(), letter_references) - - return jsonify(references=references["references"]), 200 + pass diff --git a/app/letters/utils.py b/app/letters/utils.py index 6369b22040..8d5bcab489 100644 --- a/app/letters/utils.py +++ b/app/letters/utils.py @@ -1,21 +1,6 @@ -import io -import math -from datetime import datetime, timedelta from enum import Enum -import boto3 -from flask import current_app -from notifications_utils.letter_timings import LETTER_PROCESSING_DEADLINE -from notifications_utils.pdf import pdf_page_count -from notifications_utils.s3 import s3upload -from notifications_utils.timezones import convert_utc_to_local_timezone - -from app.models import ( - KEY_TYPE_TEST, - NOTIFICATION_VALIDATION_FAILED, - RESOLVE_POSTAGE_FOR_FILE_NAME, - SECOND_CLASS, -) +from app.models import SECOND_CLASS class 
ScanErrorType(Enum): @@ -29,203 +14,64 @@ class ScanErrorType(Enum): def get_folder_name(_now, is_test_or_scan_letter=False): - if is_test_or_scan_letter: - folder_name = "" - else: - print_datetime = convert_utc_to_local_timezone(_now) - if print_datetime.time() > LETTER_PROCESSING_DEADLINE: - print_datetime += timedelta(days=1) - folder_name = "{}/".format(print_datetime.date()) - return folder_name + pass def get_letter_pdf_filename(reference, crown, is_scan_letter=False, postage=SECOND_CLASS): - now = datetime.utcnow() - - upload_file_name = LETTERS_PDF_FILE_LOCATION_STRUCTURE.format( - folder=get_folder_name(now, is_scan_letter), - reference=reference, - duplex="D", - letter_class=RESOLVE_POSTAGE_FOR_FILE_NAME[postage], - colour="C", - crown="C" if crown else "N", - date=now.strftime("%Y%m%d%H%M%S"), - ).upper() - - return upload_file_name + pass def get_bucket_name_and_prefix_for_notification(notification): - folder = "" - if notification.status == NOTIFICATION_VALIDATION_FAILED: - bucket_name = current_app.config["INVALID_PDF_BUCKET_NAME"] - elif notification.key_type == KEY_TYPE_TEST: - bucket_name = current_app.config["TEST_LETTERS_BUCKET_NAME"] - else: - bucket_name = current_app.config["LETTERS_PDF_BUCKET_NAME"] - if notification.sent_at: - folder = "{}/".format(notification.sent_at.date()) - elif notification.updated_at: - folder = get_folder_name(notification.updated_at, False) - else: - folder = get_folder_name(notification.created_at, False) - - upload_file_name = PRECOMPILED_BUCKET_PREFIX.format(folder=folder, reference=notification.reference).upper() - - return bucket_name, upload_file_name + pass def get_reference_from_filename(filename): - # filename looks like '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF' - filename_parts = filename.split(".") - return filename_parts[1] + pass def upload_letter_pdf(notification, pdf_data, precompiled=False): - current_app.logger.info( - "PDF Letter {} reference {} created at {}, {} bytes".format( - notification.id, - notification.reference, - notification.created_at, - len(pdf_data), - ) - ) - - upload_file_name = get_letter_pdf_filename( - notification.reference, - notification.service.crown, - is_scan_letter=precompiled or notification.key_type == KEY_TYPE_TEST, - postage=notification.postage, - ) - - if precompiled: - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - elif notification.key_type == KEY_TYPE_TEST: - bucket_name = current_app.config["TEST_LETTERS_BUCKET_NAME"] - else: - bucket_name = current_app.config["LETTERS_PDF_BUCKET_NAME"] - - s3upload( - filedata=pdf_data, - region=current_app.config["AWS_REGION"], - bucket_name=bucket_name, - file_location=upload_file_name, - ) - - current_app.logger.info( - "Uploaded letters PDF {} to {} for notification id {}".format(upload_file_name, bucket_name, notification.id) - ) - return upload_file_name + pass def move_failed_pdf(source_filename, scan_error_type): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - target_filename = ("ERROR/" if scan_error_type == ScanErrorType.ERROR else "FAILURE/") + source_filename - - _move_s3_object(scan_bucket, source_filename, scan_bucket, target_filename) + pass def copy_redaction_failed_pdf(source_filename): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - target_filename = "REDACTION_FAILURE/" + source_filename - - _copy_s3_object(scan_bucket, source_filename, scan_bucket, target_filename) + pass def move_error_pdf_to_scan_bucket(source_filename): - scan_bucket = 
current_app.config["LETTERS_SCAN_BUCKET_NAME"] - error_file = "ERROR/" + source_filename - - _move_s3_object(scan_bucket, error_file, scan_bucket, source_filename) + pass def move_scan_to_invalid_pdf_bucket(source_filename): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - invalid_pdf_bucket = current_app.config["INVALID_PDF_BUCKET_NAME"] - _move_s3_object(scan_bucket, source_filename, invalid_pdf_bucket, source_filename) + pass def move_uploaded_pdf_to_letters_bucket(source_filename, upload_filename): - _move_s3_object( - source_bucket=current_app.config["TRANSIENT_UPLOADED_LETTERS"], - source_filename=source_filename, - target_bucket=current_app.config["LETTERS_PDF_BUCKET_NAME"], - target_filename=upload_filename, - ) + pass def get_file_names_from_error_bucket(): - s3 = boto3.resource("s3") - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - bucket = s3.Bucket(scan_bucket) - - return bucket.objects.filter(Prefix="ERROR") + pass def get_letter_pdf(notification): - bucket_name, prefix = get_bucket_name_and_prefix_for_notification(notification) - - s3 = boto3.resource("s3") - bucket = s3.Bucket(bucket_name) - item = next(x for x in bucket.objects.filter(Prefix=prefix)) - - obj = s3.Object(bucket_name=bucket_name, key=item.key) - return obj.get()["Body"].read() + pass def _move_s3_object(source_bucket, source_filename, target_bucket, target_filename): - s3 = boto3.resource("s3") - copy_source = {"Bucket": source_bucket, "Key": source_filename} - - target_bucket = s3.Bucket(target_bucket) - obj = target_bucket.Object(target_filename) - - # Tags are copied across but the expiration time is reset in the destination bucket - # e.g. if a file has 5 days left to expire on a ONE_WEEK retention in the source bucket, - # in the destination bucket the expiration time will be reset to 7 days left to expire - obj.copy(copy_source, ExtraArgs={"ServerSideEncryption": "AES256"}) - - s3.Object(source_bucket, source_filename).delete() - - current_app.logger.info( - "Moved letter PDF: {}/{} to {}/{}".format(source_bucket, source_filename, target_bucket, target_filename) - ) + pass def _copy_s3_object(source_bucket, source_filename, target_bucket, target_filename): - s3 = boto3.resource("s3") - copy_source = {"Bucket": source_bucket, "Key": source_filename} - - target_bucket = s3.Bucket(target_bucket) - obj = target_bucket.Object(target_filename) - - # Tags are copied across but the expiration time is reset in the destination bucket - # e.g. 
if a file has 5 days left to expire on a ONE_WEEK retention in the source bucket, - # in the destination bucket the expiration time will be reset to 7 days left to expire - obj.copy(copy_source, ExtraArgs={"ServerSideEncryption": "AES256"}) - - current_app.logger.info( - "Copied letter PDF: {}/{} to {}/{}".format(source_bucket, source_filename, target_bucket, target_filename) - ) + pass def letter_print_day(created_at): - bst_print_datetime = convert_utc_to_local_timezone(created_at) + timedelta(hours=6, minutes=30) - bst_print_date = bst_print_datetime.date() - - current_bst_date = convert_utc_to_local_timezone(datetime.utcnow()).date() - - if bst_print_date >= current_bst_date: - return "today" - else: - print_date = bst_print_datetime.strftime("%d %B").lstrip("0") - return "on {}".format(print_date) + pass def get_page_count(pdf): - pages = pdf_page_count(io.BytesIO(pdf)) - pages_per_sheet = 2 - billable_units = math.ceil(pages / pages_per_sheet) - return billable_units + pass diff --git a/app/models.py b/app/models.py index 704ccf798a..f79867918e 100644 --- a/app/models.py +++ b/app/models.py @@ -276,6 +276,12 @@ class EmailBranding(BaseModel): nullable=False, default=BRANDING_ORG_NEW, ) + organisation_id = db.Column( + UUID(as_uuid=True), db.ForeignKey("organisation.id", ondelete="SET NULL"), index=True, nullable=True + ) + organisation = db.relationship("Organisation", back_populates="email_branding", foreign_keys=[organisation_id]) + alt_text_en = db.Column(db.String(), nullable=True) + alt_text_fr = db.Column(db.String(), nullable=True) def serialize(self) -> dict: serialized = { @@ -285,6 +291,9 @@ def serialize(self) -> dict: "name": self.name, "text": self.text, "brand_type": self.brand_type, + "organisation_id": str(self.organisation_id) if self.organisation_id else "", + "alt_text_en": self.alt_text_en, + "alt_text_fr": self.alt_text_fr, } return serialized @@ -449,10 +458,9 @@ class Organisation(BaseModel): "Domain", ) - email_branding = db.relationship("EmailBranding") + email_branding = db.relationship("EmailBranding", uselist=False) email_branding_id = db.Column( UUID(as_uuid=True), - db.ForeignKey("email_branding.id"), nullable=True, ) @@ -1268,9 +1276,10 @@ def get_link(self): SNS_PROVIDER = "sns" +PINPOINT_PROVIDER = "pinpoint" SES_PROVIDER = "ses" -SMS_PROVIDERS = [SNS_PROVIDER] +SMS_PROVIDERS = [SNS_PROVIDER, PINPOINT_PROVIDER] EMAIL_PROVIDERS = [SES_PROVIDER] PROVIDERS = SMS_PROVIDERS + EMAIL_PROVIDERS diff --git a/app/service/rest.py b/app/service/rest.py index 8ecf13f47d..8ba79097e3 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -11,7 +11,6 @@ over_email_daily_limit_cache_key, over_sms_daily_limit_cache_key, ) -from notifications_utils.letter_timings import letter_can_be_cancelled from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy import func from sqlalchemy.exc import IntegrityError @@ -53,13 +52,6 @@ dao_get_reply_to_by_service_id, update_reply_to_email_address, ) -from app.dao.service_letter_contact_dao import ( - add_letter_contact_for_service, - archive_letter_contact, - dao_get_letter_contact_by_id, - dao_get_letter_contacts_by_service_id, - update_letter_contact, -) from app.dao.service_safelist_dao import ( dao_add_and_commit_safelisted_contacts, dao_fetch_service_safelist, @@ -93,7 +85,6 @@ from app.dao.templates_dao import dao_get_template_by_id from app.dao.users_dao import get_user_by_id from app.errors import InvalidRequest, register_errors -from app.letters.utils import letter_print_day from 
app.models import ( KEY_TYPE_NORMAL, LETTER_TYPE, @@ -117,10 +108,7 @@ service_schema, ) from app.service import statistics -from app.service.send_notification import ( - send_one_off_notification, - send_pdf_letter_notification, -) +from app.service.send_notification import send_one_off_notification from app.service.sender import send_notification_to_service_users from app.service.service_data_retention_schema import ( add_service_data_retention_request, @@ -128,7 +116,6 @@ ) from app.service.service_senders_schema import ( add_service_email_reply_to_request, - add_service_letter_contact_block_request, add_service_sms_sender_request, ) from app.service.utils import ( @@ -575,13 +562,6 @@ def cancel_notification_for_service(service_id, notification_id): "Notification cannot be cancelled - only letters can be cancelled", status_code=400, ) - elif not letter_can_be_cancelled(notification.status, notification.created_at): - print_day = letter_print_day(notification.created_at) - - raise InvalidRequest( - "It’s too late to cancel this letter. Printing started {} at 5.30pm".format(print_day), - status_code=400, - ) updated_notification = notifications_dao.update_notification_status_by_id( notification_id, @@ -793,8 +773,7 @@ def create_one_off_notification(service_id): @service_blueprint.route("//send-pdf-letter", methods=["POST"]) def create_pdf_letter(service_id): - resp = send_pdf_letter_notification(service_id, request.get_json()) - return jsonify(resp), 201 + pass @service_blueprint.route("//email-reply-to", methods=["GET"]) @@ -872,41 +851,22 @@ def delete_service_reply_to_email_address(service_id, reply_to_email_id): @service_blueprint.route("//letter-contact", methods=["GET"]) def get_letter_contacts(service_id): - result = dao_get_letter_contacts_by_service_id(service_id) - return jsonify([i.serialize() for i in result]), 200 + pass @service_blueprint.route("//letter-contact/", methods=["GET"]) def get_letter_contact_by_id(service_id, letter_contact_id): - result = dao_get_letter_contact_by_id(service_id=service_id, letter_contact_id=letter_contact_id) - return jsonify(result.serialize()), 200 + pass @service_blueprint.route("//letter-contact", methods=["POST"]) def add_service_letter_contact(service_id): - # validate the service exists, throws ResultNotFound exception. - dao_fetch_service_by_id(service_id) - form = validate(request.get_json(), add_service_letter_contact_block_request) - new_letter_contact = add_letter_contact_for_service( - service_id=service_id, - contact_block=form["contact_block"], - is_default=form.get("is_default", True), - ) - return jsonify(data=new_letter_contact.serialize()), 201 + pass @service_blueprint.route("//letter-contact/", methods=["POST"]) def update_service_letter_contact(service_id, letter_contact_id): - # validate the service exists, throws ResultNotFound exception. 
- dao_fetch_service_by_id(service_id) - form = validate(request.get_json(), add_service_letter_contact_block_request) - new_reply_to = update_letter_contact( - service_id=service_id, - letter_contact_id=letter_contact_id, - contact_block=form["contact_block"], - is_default=form.get("is_default", True), - ) - return jsonify(data=new_reply_to.serialize()), 200 + pass @service_blueprint.route( @@ -914,9 +874,7 @@ def update_service_letter_contact(service_id, letter_contact_id): methods=["POST"], ) def delete_service_letter_contact(service_id, letter_contact_id): - archived_letter_contact = archive_letter_contact(service_id, letter_contact_id) - - return jsonify(data=archived_letter_contact.serialize()), 200 + pass @service_blueprint.route("//sms-sender", methods=["POST"]) diff --git a/app/user/contact_request.py b/app/user/contact_request.py index 7317a69302..cfca30cafb 100644 --- a/app/user/contact_request.py +++ b/app/user/contact_request.py @@ -16,6 +16,7 @@ class ContactRequest: name: str = field(default="") message: str = field(default="") user_profile: str = field(default="") + organisation_id: str = field(default="") department_org_name: str = field(default="") program_service_name: str = field(default="") intended_recipients: str = field(default="") @@ -30,6 +31,9 @@ class ContactRequest: notification_types: str = field(default="") expected_volume: str = field(default="") branding_url: str = field(default="") + branding_logo_name: str = field(default="") + alt_text_en: str = field(default="") + alt_text_fr: str = field(default="") def __post_init__(self): # email address is mandatory for us diff --git a/app/user/rest.py b/app/user/rest.py index d6f41c8fd5..ea28646d41 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -497,7 +497,12 @@ def send_branding_request(user_id): email_address=user.email_address, service_id=data["serviceID"], service_name=data["service_name"], + organisation_id=data["organisation_id"], + department_org_name=data["organisation_name"], branding_url=get_logo_url(data["filename"]), + branding_logo_name=data["branding_logo_name"] if "branding_logo_name" in data else "", + alt_text_en=data["alt_text_en"], + alt_text_fr=data["alt_text_fr"], ) contact.tags = ["z_skip_opsgenie", "z_skip_urgent_escalation"] diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index 9a1640b9a2..d8edf8627b 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -7,6 +7,7 @@ import werkzeug from flask import abort, current_app, jsonify, request +from notifications_utils import SMS_CHAR_COUNT_LIMIT from notifications_utils.recipients import ( RecipientCSV, try_validate_and_format_phone_number, @@ -229,6 +230,12 @@ def post_bulk(): increment_email_daily_count_send_warnings_if_needed(authenticated_service, len(list(recipient_csv.get_rows()))) if template.template_type == SMS_TYPE: + # set sender_id if missing + if form["validated_sender_id"] is None: + default_senders = [x for x in authenticated_service.service_sms_senders if x.is_default] + default_sender_id = default_senders[0].id if default_senders else None + form["validated_sender_id"] = default_sender_id + # calculate the number of simulated recipients numberOfSimulated = sum( simulated_recipient(i["phone_number"].data, template.template_type) for i in list(recipient_csv.get_rows()) @@ -699,6 +706,12 @@ def check_for_csv_errors(recipient_csv, max_rows, remaining_messages): message=f"You cannot send to these recipients {explanation}", 
status_code=400, ) + if recipient_csv.template_type == SMS_TYPE and any(recipient_csv.rows_with_combined_variable_content_too_long): + raise BadRequestError( + message=f"Row {next(recipient_csv.rows_with_combined_variable_content_too_long).index + 1} - has a character count greater than {SMS_CHAR_COUNT_LIMIT} characters. Some messages may be too long due to custom content.", + status_code=400, + ) + if recipient_csv.rows_with_errors: def row_error(row): diff --git a/bin/execute_and_publish_performance_test.sh b/bin/execute_and_publish_performance_test.sh index ef7fe0a5bf..fa2d50b05a 100755 --- a/bin/execute_and_publish_performance_test.sh +++ b/bin/execute_and_publish_performance_test.sh @@ -1,13 +1,24 @@ #!/bin/bash +# Setup current_time=$(date "+%Y.%m.%d-%H.%M.%S") perf_test_aws_s3_bucket=${PERF_TEST_AWS_S3_BUCKET:-notify-performance-test-results-staging} perf_test_csv_directory_path=${PERF_TEST_CSV_DIRECTORY_PATH:-/tmp/notify_performance_test} - mkdir -p $perf_test_csv_directory_path/$current_time +# Run old performance test and copy results to S3 locust --headless --config tests-perf/locust/locust.conf --html $perf_test_csv_directory_path/$current_time/index.html --csv $perf_test_csv_directory_path/$current_time/perf_test - aws s3 cp $perf_test_csv_directory_path/ "s3://$perf_test_aws_s3_bucket" --recursive || exit 1 +# Sleep 15 minutes to allow the system to stabilize +sleep 900 + +# Run email send rate performance test +# This configuration should send 10K emails / minute for 10 minutes for 100K emails total. +# We run this test on Tuesday through Friday (just after midnight UTC) only. +if [ "$(date +%u)" -ge 2 ] && [ "$(date +%u)" -le 5 ]; then + locust --headless --host https://api.staging.notification.cdssandbox.xyz --locustfile tests-perf/locust/send_rate_email.py --users 5 --run-time 10m --spawn-rate 1 +fi + +# Cleanup rm -rf $perf_test_csv_directory_path/$current_time diff --git a/catalog-info.yaml b/catalog-info.yaml index ca14ec9b7f..5d62d960c8 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -1,11 +1,18 @@ +# Metadata for the backstage catalog accessible at this link: +# https://backstage.cdssandbox.xyz/ +--- apiVersion: backstage.io/v1alpha1 kind: Component metadata: - name: notification-api - description: GC Notify API | GC Notification API + name: notification-api-service + title: GC Notify API | GC Notification API + description: REST API service for GC Notification + annotations: + github.com/project-slug: cds-snc/notification-api labels: license: MIT spec: - type: website - lifecycle: experimental - owner: cds-snc + type: service + lifecycle: production + owner: group:cds-snc/notify-dev + system: gc-notification diff --git a/ci/Dockerfile.test b/ci/Dockerfile.test index e068dfbfd5..3a5874db57 100644 --- a/ci/Dockerfile.test +++ b/ci/Dockerfile.test @@ -1,6 +1,6 @@ # Heavily inspired from Dockerfile, this one also install requirements_for_test.txt -FROM python:3.10-alpine@sha256:860f632e67178d9e90c7dfa9844a5e02098220bff5716d3c2fe1870325f00853 +FROM python:3.10-alpine@sha256:7edffe5acc6a2c4c009fece2fbdc85f04fde4c8481202473b880ef3f8fbb2939 ENV PYTHONDONTWRITEBYTECODE 1 ENV POETRY_VERSION "1.7.1" diff --git a/docker-compose.yml b/docker-compose.yml index 610d8dc306..443727d0c6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,7 +18,7 @@ services: - "listen_addresses=*" restart: always redis: - image: redis:6.2@sha256:9e75c88539241ad7f61bc9c39ea4913b354064b8a75ca5fc40e1cef41b645bc0 + image: 
redis:6.2@sha256:d4948d011cc38e94f0aafb8f9a60309bd93034e07d10e0767af534512cf012a9 web: image: notification-api restart: always diff --git a/local/Dockerfile b/local/Dockerfile index f4ea41376c..8c0e128f7a 100644 --- a/local/Dockerfile +++ b/local/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10-alpine@sha256:860f632e67178d9e90c7dfa9844a5e02098220bff5716d3c2fe1870325f00853 +FROM python:3.10-alpine@sha256:7edffe5acc6a2c4c009fece2fbdc85f04fde4c8481202473b880ef3f8fbb2939 ENV PYTHONDONTWRITEBYTECODE 1 ENV POETRY_VERSION "1.7.1" diff --git a/migrations/versions/0445_add_org_id_branding.py b/migrations/versions/0445_add_org_id_branding.py new file mode 100644 index 0000000000..0504d5f492 --- /dev/null +++ b/migrations/versions/0445_add_org_id_branding.py @@ -0,0 +1,46 @@ +""" +Revision ID: 0445_add_org_id_branding +Revises: 0444_add_index_n_history2.py +Create Date: 2024-02-27 +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "0445_add_org_id_branding" +down_revision = "0444_add_index_n_history2" + + +def upgrade(): + op.add_column( + "email_branding", + sa.Column("organisation_id", postgresql.UUID(as_uuid=True), nullable=True), + ) + op.create_index( + op.f("ix_email_branding_organisation_id"), + "email_branding", + ["organisation_id"], + unique=False, + ) + op.create_foreign_key( + "fk_email_branding_organisation", + "email_branding", + "organisation", + ["organisation_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("fk_organisation_email_branding_id", "organisation", type_="foreignkey") + + +def downgrade(): + op.drop_index(op.f("ix_email_branding_organisation_id"), table_name="email_branding") + op.drop_constraint("fk_email_branding_organisation", "email_branding", type_="foreignkey") + op.drop_column("email_branding", "organisation_id") + op.create_foreign_key( + "fk_organisation_email_branding_id", + "organisation", + "email_branding", + ["email_branding_id"], + ["id"], + ) diff --git a/migrations/versions/0446_add_alt_text.py b/migrations/versions/0446_add_alt_text.py new file mode 100644 index 0000000000..868ce33db7 --- /dev/null +++ b/migrations/versions/0446_add_alt_text.py @@ -0,0 +1,34 @@ +""" +Revision ID: 0446_add_alt_text.py +Revises: 0445_add_org_id_branding.py +Create Date: 2024-04-23 +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy import text + +revision = "0446_add_alt_text" +down_revision = "0445_add_org_id_branding" + + +def upgrade(): + table_description = op.get_bind().execute( + text("SELECT * FROM information_schema.columns WHERE table_name = 'email_branding'") + ) + + # Check if the column exists + if "alt_text_en" not in [column["column_name"] for column in table_description]: + op.add_column( + "email_branding", + sa.Column("alt_text_en", sa.String(), nullable=True), + ) + if "alt_text_fr" not in [column["column_name"] for column in table_description]: + op.add_column( + "email_branding", + sa.Column("alt_text_fr", sa.String(), nullable=True), + ) + + +def downgrade(): + op.drop_column("email_branding", "alt_text_fr") + op.drop_column("email_branding", "alt_text_en") diff --git a/migrations/versions/0447_update_verify_code_template.py b/migrations/versions/0447_update_verify_code_template.py new file mode 100644 index 0000000000..9db7e8f1c8 --- /dev/null +++ b/migrations/versions/0447_update_verify_code_template.py @@ -0,0 +1,97 @@ +""" + +Revision ID: 0447_update_verify_code_template +Revises: 0446_add_alt_text +Create Date: 2023-10-05 00:00:00 + +""" +from datetime import 
diff --git a/migrations/versions/0447_update_verify_code_template.py b/migrations/versions/0447_update_verify_code_template.py
new file mode 100644
index 0000000000..9db7e8f1c8
--- /dev/null
+++ b/migrations/versions/0447_update_verify_code_template.py
@@ -0,0 +1,97 @@
+"""
+
+Revision ID: 0447_update_verify_code_template
+Revises: 0446_add_alt_text
+Create Date: 2023-10-05 00:00:00
+
+"""
+from datetime import datetime
+
+from alembic import op
+from flask import current_app
+
+revision = "0447_update_verify_code_template"
+down_revision = "0446_add_alt_text"
+
+near_content = "\n".join(
+    [
+        "[[en]]",
+        "Hi ((name)),",
+        "",
+        "Here is your security code to log in to GC Notify:",
+        "",
+        "^ **((verify_code))**",
+        "[[/en]]",
+        "",
+        "---",
+        "",
+        "[[fr]]",
+        "Bonjour ((name)),",
+        "",
+        "Voici votre code de sécurité pour vous connecter à Notification GC:",
+        "",
+        "^ **((verify_code))**",
+        "[[/fr]]",
+    ]
+)
+
+
+templates = [
+    {
+        "id": current_app.config["EMAIL_2FA_TEMPLATE_ID"],
+        "template_type": "email",
+        "subject": "Sign in | Connectez-vous",
+        "content": near_content,
+        "process_type": "priority",
+    },
+]
+
+
+def upgrade():
+    conn = op.get_bind()
+
+    for template in templates:
+        current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone()
+        name = conn.execute("select name from templates where id='{}'".format(template["id"])).fetchone()
+        template["version"] = current_version[0] + 1
+        template["name"] = name[0]
+
+    template_update = """
+        UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at = '{}'
+        WHERE id = '{}'
+    """
+    template_history_insert = """
+        INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject,
+        created_by_id, version, process_type, hidden)
+        VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false)
+    """
+
+    for template in templates:
+        op.execute(
+            template_update.format(
+                template["content"],
+                template["subject"],
+                template["version"],
+                datetime.utcnow(),
+                template["id"],
+            )
+        )
+
+        op.execute(
+            template_history_insert.format(
+                template["id"],
+                template["name"],
+                template["template_type"],
+                datetime.utcnow(),
+                template["content"],
+                current_app.config["NOTIFY_SERVICE_ID"],
+                template["subject"],
+                current_app.config["NOTIFY_USER_ID"],
+                template["version"],
+                template["process_type"],
+            )
+        )
+
+
+def downgrade():
+    pass
diff --git a/migrations/versions/0448_update_verify_code2.py b/migrations/versions/0448_update_verify_code2.py
new file mode 100644
index 0000000000..39f3acee1b
--- /dev/null
+++ b/migrations/versions/0448_update_verify_code2.py
@@ -0,0 +1,97 @@
+"""
+
+Revision ID: 0448_update_verify_code2
+Revises: 0447_update_verify_code_template
+Create Date: 2023-10-05 00:00:00
+
+"""
+from datetime import datetime
+
+from alembic import op
+from flask import current_app
+
+revision = "0448_update_verify_code2"
+down_revision = "0447_update_verify_code_template"
+
+near_content = "\n".join(
+    [
+        "[[en]]",
+        "Hi ((name)),",
+        "",
+        "Here is your security code to log in to GC Notify:",
+        "",
+        "^ ((verify_code))",
+        "[[/en]]",
+        "",
+        "---",
+        "",
+        "[[fr]]",
+        "Bonjour ((name)),",
+        "",
+        "Voici votre code de sécurité pour vous connecter à Notification GC:",
+        "",
+        "^ ((verify_code))",
+        "[[/fr]]",
+    ]
+)
+
+
+templates = [
+    {
+        "id": current_app.config["EMAIL_2FA_TEMPLATE_ID"],
+        "template_type": "email",
+        "subject": "Sign in | Connectez-vous",
+        "content": near_content,
+        "process_type": "priority",
+    },
+]
+
+
+def upgrade():
+    conn = op.get_bind()
+
+    for template in templates:
+        current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone()
+        name = conn.execute("select name from templates where id='{}'".format(template["id"])).fetchone()
+        template["version"] = current_version[0] + 1
+        template["name"] = name[0]
+
+    template_update = """
+        UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at = '{}'
+        WHERE id = '{}'
+    """
+    template_history_insert = """
+        INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject,
+        created_by_id, version, process_type, hidden)
+        VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false)
+    """
+
+    for template in templates:
+        op.execute(
+            template_update.format(
+                template["content"],
+                template["subject"],
+                template["version"],
+                datetime.utcnow(),
+                template["id"],
+            )
+        )
+
+        op.execute(
+            template_history_insert.format(
+                template["id"],
+                template["name"],
+                template["template_type"],
+                datetime.utcnow(),
+                template["content"],
+                current_app.config["NOTIFY_SERVICE_ID"],
+                template["subject"],
+                current_app.config["NOTIFY_USER_ID"],
+                template["version"],
+                template["process_type"],
+            )
+        )
+
+
+def downgrade():
+    pass
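Migrations 0447 and 0448 interpolate values into SQL with str.format, which is tolerable here because every input is a trusted config value. For anything user-influenced, the same update can be expressed with bound parameters instead; a hedged sketch against the project's pinned SQLAlchemy 1.4, where the DSN and template id below are placeholders, not values from this PR:

# Sketch only: the same UPDATE with bound parameters rather than str.format.
from datetime import datetime

from sqlalchemy import create_engine, text

engine = create_engine("postgresql://localhost/notification_api")  # assumed local DSN
template_update = text(
    "UPDATE templates SET content = :content, subject = :subject, "
    "version = :version, updated_at = :updated_at WHERE id = :id"
)
with engine.begin() as conn:
    conn.execute(
        template_update,
        {
            "content": "...",
            "subject": "Sign in | Connectez-vous",
            "version": 2,
            "updated_at": datetime.utcnow(),
            "id": "00000000-0000-0000-0000-000000000000",  # hypothetical template id
        },
    )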
diff --git a/migrations/versions/0449_update_magic_link_auth.py b/migrations/versions/0449_update_magic_link_auth.py
new file mode 100644
index 0000000000..6e29d5501c
--- /dev/null
+++ b/migrations/versions/0449_update_magic_link_auth.py
@@ -0,0 +1,97 @@
+"""
+
+Revision ID: 0449_update_magic_link_auth
+Revises: 0448_update_verify_code2
+Create Date: 2023-10-05 00:00:00
+
+"""
+from datetime import datetime
+
+from alembic import op
+from flask import current_app
+
+revision = "0449_update_magic_link_auth"
+down_revision = "0448_update_verify_code2"
+
+near_content = "\n".join(
+    [
+        "[[en]]",
+        "Hi ((name)),",
+        "",
+        "Here is your magic link to log in to GC Notify:",
+        "",
+        "^ **[Sign-in](((link_url_en)))**",
+        "[[/en]]",
+        "",
+        "---",
+        "",
+        "[[fr]]",
+        "Bonjour ((name)),",
+        "",
+        "Voici votre lien magique pour vous connecter à Notification GC:",
+        "",
+        "^ **[Connectez-vous](((link_url_fr)))**",
+        "[[/fr]]",
+    ]
+)
+
+
+template = {
+    "id": current_app.config["EMAIL_MAGIC_LINK_TEMPLATE_ID"],
+    "template_type": "email",
+    "subject": "Sign in | Connectez-vous",
+    "content": near_content,
+    "process_type": "priority",
+    "name": "Sign in - Magic Link | Se connecter - Lien magique",
+}
+
+
+def upgrade():
+    conn = op.get_bind()
+
+    template_insert = """
+        INSERT INTO templates (id, name, template_type, created_at, updated_at, content, service_id, subject, created_by_id, version, archived, process_type, hidden)
+        VALUES ('{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', false, '{}', false)
+    """
+
+    template_history_insert = """
+        INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject,
+        created_by_id, version, process_type, hidden)
+        VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false)
+    """
+    op.execute(
+        template_insert.format(
+            template["id"],
+            template["name"],
+            template["template_type"],
+            datetime.utcnow(),
+            datetime.utcnow(),
+            template["content"],
+            current_app.config["NOTIFY_SERVICE_ID"],
+            template["subject"],
+            current_app.config["NOTIFY_USER_ID"],
+            1,
+            template["process_type"],
+        )
+    )
+
+    op.execute(
+        template_history_insert.format(
+            template["id"],
+            template["name"],
+            template["template_type"],
+            datetime.utcnow(),
+            template["content"],
+            current_app.config["NOTIFY_SERVICE_ID"],
+            template["subject"],
+            current_app.config["NOTIFY_USER_ID"],
+            1,
+            template["process_type"],
+        )
+    )
+
+    op.execute("INSERT INTO auth_type (name) VALUES ('magic_link')")
+
+
+def downgrade():
+    op.execute("DELETE FROM auth_type WHERE name = 'magic_link'")
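One subtlety in near_content above: the list items are separate string literals, and Python fuses adjacent literals at compile time, so a single missing comma silently merges two lines into one before "\n".join ever runs. A tiny self-contained demonstration of the pitfall those commas guard against:

# Adjacent string literals concatenate implicitly; a dropped comma loses a newline.
with_commas = "\n".join(["[[en]]", "Hi ((name)),"])
without_commas = "\n".join(["[[en]]" "Hi ((name)),"])  # one fused, single-item list

assert with_commas == "[[en]]\nHi ((name)),"
assert without_commas == "[[en]]Hi ((name)),"  # no newline between the two lines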
diff --git a/migrations/versions/0450_enable_pinpoint_provider.py b/migrations/versions/0450_enable_pinpoint_provider.py
new file mode 100644
index 0000000000..0c2c8247dd
--- /dev/null
+++ b/migrations/versions/0450_enable_pinpoint_provider.py
@@ -0,0 +1,19 @@
+"""
+
+Revision ID: 0450_enable_pinpoint_provider
+Revises: 0449_update_magic_link_auth
+Create Date: 2021-01-08 09:03:00.214680
+
+"""
+from alembic import op
+
+revision = "0450_enable_pinpoint_provider"
+down_revision = "0449_update_magic_link_auth"
+
+
+def upgrade():
+    op.execute("UPDATE provider_details set active=true where identifier in ('pinpoint');")
+
+
+def downgrade():
+    op.execute("UPDATE provider_details set active=false where identifier in ('pinpoint');")
diff --git a/migrations/versions/0451_create_db_users.py b/migrations/versions/0451_create_db_users.py
new file mode 100644
index 0000000000..c16c162db5
--- /dev/null
+++ b/migrations/versions/0451_create_db_users.py
@@ -0,0 +1,39 @@
+"""
+
+Revision ID: 0451_create_db_users
+Revises: 0450_enable_pinpoint_provider
+Create Date: 2024-05-23 12:00:00
+
+"""
+from alembic import op
+
+revision = "0451_create_db_users"
+down_revision = "0450_enable_pinpoint_provider"
+
+super_role = "rds_superuser"
+roles = ["app_db_user", "quicksight_db_user"]
+
+
+def upgrade():
+    create_role_if_not_exist(super_role)
+    for role in roles:
+        create_role_if_not_exist(role)
+        op.execute(f"GRANT {super_role} TO {role} WITH ADMIN OPTION;")
+
+
+def create_role_if_not_exist(role):
+    """
+    Makes sure the expected role exists in the database before performing the GRANT operation.
+    If the role already exists, nothing happens. This is needed so that the migrations can be
+    run on localhost where the roles do not exist.
+    """
+    op.execute(
+        f"""
+        DO $$
+        BEGIN
+            CREATE ROLE {role};
+        EXCEPTION WHEN duplicate_object THEN RAISE NOTICE '%, skipping', SQLERRM USING ERRCODE = SQLSTATE;
+        END
+        $$;
+        """
+    )
diff --git a/migrations/versions/0452_set_pgaudit_config.py b/migrations/versions/0452_set_pgaudit_config.py
new file mode 100644
index 0000000000..88f0e87b8e
--- /dev/null
+++ b/migrations/versions/0452_set_pgaudit_config.py
@@ -0,0 +1,53 @@
+"""
+
+Revision ID: 0452_set_pgaudit_config
+Revises: 0451_create_db_users
+Create Date: 2024-05-27 12:00:00
+
+"""
+from alembic import op
+
+revision = "0452_set_pgaudit_config"
+down_revision = "0451_create_db_users"
+
+users = ["app_db_user", "rdsproxyadmin"]
+database_name = op.get_bind().engine.url.database  # database name that the migration is being run on
+
+
+def upgrade():
+    # Skip this migration in the test database as there are multiple test databases that are created.
+    # This leads to a race condition attempting to alter the same users multiple times and causes
+    # sporadic unit test failures.
+    if "test_notification_api" in database_name:
+        return
+
+    for user in users:
+        create_user_if_not_exists(user)
+        op.execute(f"ALTER USER {user} SET pgaudit.log TO 'NONE'")
+
+
+def downgrade():
+    if "test_notification_api" in database_name:
+        return
+
+    # Reset the pgaudit.log setting
+    for user in users:
+        op.execute(f"ALTER USER {user} RESET pgaudit.log")
+
+
+def create_user_if_not_exists(user):
+    """
+    Makes sure the expected user exists in the database before performing the ALTER USER operation.
+    If the user already exists, nothing happens. This is needed so that the migrations can be
+    run on localhost where the users do not exist.
+    """
+    op.execute(
+        f"""
+        DO $$
+        BEGIN
+            CREATE USER {user};
+        EXCEPTION WHEN duplicate_object THEN RAISE NOTICE '%, skipping', SQLERRM USING ERRCODE = SQLSTATE;
+        END
+        $$;
+        """
+    )
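The DO $$ ... EXCEPTION WHEN duplicate_object pattern in 0451 and 0452 is what makes these migrations safe to re-run and safe on databases where the roles already exist. A hedged sketch of exercising the same statement outside Alembic against a local Postgres; the psycopg2 connection string is an assumption, and psycopg2 is already a project dependency:

# Sketch only: run the idempotent role-creation block directly.
import psycopg2

create_role_stmt = """
DO $$
BEGIN
    CREATE ROLE app_db_user;
EXCEPTION WHEN duplicate_object THEN RAISE NOTICE '%, skipping', SQLERRM USING ERRCODE = SQLSTATE;
END
$$;
"""

conn = psycopg2.connect("dbname=notification_api")  # assumed local database
with conn:  # wraps the statement in a transaction and commits on success
    with conn.cursor() as cur:
        cur.execute(create_role_stmt)  # repeat runs only emit a NOTICE, never an error
conn.close()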
diff --git a/poetry.lock b/poetry.lock
index 3dfb13f4a1..7ac0583fc6 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -210,18 +210,18 @@ aiohttp = "*"
 
 [[package]]
 name = "awscli"
-version = "1.32.25"
+version = "1.32.100"
 description = "Universal Command Line Environment for AWS."
 optional = false
-python-versions = ">= 3.8"
+python-versions = ">=3.8"
 files = [
-    {file = "awscli-1.32.25-py3-none-any.whl", hash = "sha256:eea617961175e8bd1bd3aeda706948c89dc353fc934e81c156fe1ea484ca7a31"},
-    {file = "awscli-1.32.25.tar.gz", hash = "sha256:091bbdb852b984d81fb5d8bf00100edd9e40750c77e1542f7ce3ac952a01df6d"},
+    {file = "awscli-1.32.100-py3-none-any.whl", hash = "sha256:46e4a44dafeffe63980ab2cd0240aa15a4879cf5d84f210c9eb0facc05e7bf0a"},
+    {file = "awscli-1.32.100.tar.gz", hash = "sha256:7bd06388d7853508f96a91291c28b0745ac0a5ac73276cb7db48478d6d3c2a70"},
 ]
 
 [package.dependencies]
-botocore = "1.34.25"
-colorama = ">=0.2.5,<0.4.5"
+botocore = "1.34.100"
+colorama = ">=0.2.5,<0.4.7"
 docutils = ">=0.10,<0.17"
 PyYAML = ">=3.10,<6.1"
 rsa = ">=3.1.2,<4.8"
@@ -372,17 +372,17 @@ files = [
 
 [[package]]
 name = "boto3"
-version = "1.34.25"
+version = "1.34.100"
 description = "The AWS SDK for Python"
 optional = false
-python-versions = ">= 3.8"
+python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.34.25-py3-none-any.whl", hash = "sha256:87532469188f1eeef4dca67dffbd3f0cc1d51cef7d5e5b5dc95d3b8125f8446e"},
-    {file = "boto3-1.34.25.tar.gz", hash = "sha256:1b415e0553679ea05b9e2aed3eb271431011a67a165e3e0aefa323e13b8b7e92"},
+    {file = "boto3-1.34.100-py3-none-any.whl", hash = "sha256:bbe2bb0dfcd92380da2a2fa2c2f586ba06c118b796380b2d0f3d0ebd103ec28d"},
+    {file = "boto3-1.34.100.tar.gz", hash = "sha256:016f6d66900bb1a835dea2063f1e91fc7057dbf7fb7df8add0706f0da9492631"},
 ]
 
 [package.dependencies]
-botocore = ">=1.34.25,<1.35.0"
+botocore = ">=1.34.100,<1.35.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.10.0,<0.11.0"
 
@@ -391,22 +391,22 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.34.25"
+version = "1.34.100"
 description = "Low-level, data-driven core of boto 3."
optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "botocore-1.34.25-py3-none-any.whl", hash = "sha256:35dfab5bdb4620f73ac7c557c4e0d012429706d8760b100f099feea34b5505f8"}, - {file = "botocore-1.34.25.tar.gz", hash = "sha256:a39070bb760bd9545b0eef52a8bcb2d03918206e67a5a786ea4bd6f4bd949edd"}, + {file = "botocore-1.34.100-py3-none-any.whl", hash = "sha256:ee516fb9e9e906d311f2a9921afaf79c594db239a5b4b626e89e6960401aad0b"}, + {file = "botocore-1.34.100.tar.gz", hash = "sha256:513bea60c6531af8e1ae1fdb2947e3ef99712f39c58f4656b5efef9cb6f75a13"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""} +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.19.19)"] +crt = ["awscrt (==0.20.9)"] [[package]] name = "brotli" @@ -1623,22 +1623,23 @@ test = ["objgraph", "psutil"] [[package]] name = "gunicorn" -version = "20.1.0" +version = "22.0.0" description = "WSGI HTTP Server for UNIX" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, - {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, + {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, + {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, ] [package.dependencies] -setuptools = ">=3.0" +packaging = "*" [package.extras] -eventlet = ["eventlet (>=0.24.1)"] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] gevent = ["gevent (>=1.4.0)"] setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] tornado = ["tornado (>=0.2)"] [[package]] @@ -1972,71 +1973,71 @@ testing = ["pytest"] [[package]] name = "markupsafe" -version = "2.1.4" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, - {file = 
"MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, - {file = 
"MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, - {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = 
"MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -2428,7 +2429,7 @@ requests = ">=2.0.0" [[package]] name = "notifications-utils" -version = "52.1.4" +version = "52.2.6" description = "Shared python code for Notification - Provides logging utils etc." optional = false python-versions = "~3.10.9" @@ -2436,9 +2437,9 @@ files = [] develop = false [package.dependencies] -awscli = "1.32.25" +awscli = "1.32.100" bleach = "6.1.0" -boto3 = "1.34.25" +boto3 = "1.34.100" cachetools = "4.2.4" certifi = "^2023.7.22" cryptography = "^42.0.3" @@ -2446,10 +2447,10 @@ Flask = "2.3.3" Flask-Redis = "0.4.0" itsdangerous = "2.1.2" Jinja2 = "^3.0.0" -markupsafe = "2.1.4" +markupsafe = "2.1.5" mistune = "0.8.4" ordered-set = "4.1.0" -phonenumbers = "8.13.28" +phonenumbers = "8.13.36" py_w3c = "0.3.1" pypdf2 = "1.28.6" python-json-logger = "2.0.7" @@ -2458,13 +2459,13 @@ PyYAML = "6.0.1" requests = "2.31.0" smartypants = "2.0.1" statsd = "3.3.0" -werkzeug = "2.3.7" +werkzeug = "3.0.3" [package.source] type = "git" url = "https://github.com/cds-snc/notifier-utils.git" -reference = "52.1.4" -resolved_reference = "f62de796c4cfc25ee6ed2f1d51648dd2d0658fc0" +reference = "52.2.6" +resolved_reference = "d67d77609378d0e6e33305a007806d32566e21a1" [[package]] name = "ordered-set" @@ -2502,49 +2503,15 @@ files = [ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] -[[package]] -name = "pendulum" -version = "2.1.2" -description = "Python datetimes made easy" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = 
"sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, -] - -[package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" - [[package]] name = "phonenumbers" -version = "8.13.28" +version = "8.13.36" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.28-py2.py3-none-any.whl", hash = "sha256:ad7bc7d7fd6599a124423ffb840409630777c72d0ee58ba8070cc8e7efcb4c38"}, - {file = "phonenumbers-8.13.28.tar.gz", hash = "sha256:e22f276b0c4a70bd5b3f6d668d19cab2578f660b8df44d6418f81d64320151b9"}, + {file = "phonenumbers-8.13.36-py2.py3-none-any.whl", hash = "sha256:68e06d20ae2f8fe5c7c7fd5b433f4257bc3cc747dc5196a029c7898ea449b012"}, + {file = "phonenumbers-8.13.36.tar.gz", hash = "sha256:b4e2371e35a1172aa2c91c9200b1e48e87b9355eb575768dd38058fc8d72c9ff"}, ] [[package]] @@ -2935,6 +2902,9 @@ files = [ {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, ] +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + [package.extras] crypto = ["cryptography (>=3.4.0)"] dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] @@ -3215,17 +3185,6 @@ files = [ {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] -[[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, -] - [[package]] name = "pywin32" version = "306" @@ -3590,21 +3549,20 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar [[package]] name = "simple-salesforce" -version = "1.12.5" +version = "1.12.6" description = "A basic Salesforce.com REST API client." 
optional = false python-versions = "*" files = [ - {file = "simple-salesforce-1.12.5.tar.gz", hash = "sha256:ef65f72438e3b215619f6835d3d4356e147adf3a7ece6896d239127dd6aefcd1"}, - {file = "simple_salesforce-1.12.5-py2.py3-none-any.whl", hash = "sha256:07029575385d04132babfd6e19c1c8068c859d616a45dab07bbf9875bdc5ab93"}, + {file = "simple-salesforce-1.12.6.tar.gz", hash = "sha256:77590606c781905f6b75430562951dd2b062438da7f55fca2b61e4cde31df15b"}, + {file = "simple_salesforce-1.12.6-py2.py3-none-any.whl", hash = "sha256:66c74bee88d09ace46e4fc9c2f6b47c0d012817a764f70a5455d6dc2c7ed635c"}, ] [package.dependencies] -cryptography = "*" more-itertools = "*" -pendulum = "*" -pyjwt = "*" +pyjwt = {version = "*", extras = ["crypto"]} requests = ">=2.22.0" +typing-extensions = "*" zeep = "*" [[package]] @@ -3630,57 +3588,57 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.51" +version = "1.4.52" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.51-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:1a09d5bd1a40d76ad90e5570530e082ddc000e1d92de495746f6257dc08f166b"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2be4e6294c53f2ec8ea36486b56390e3bcaa052bf3a9a47005687ccf376745d1"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca484ca11c65e05639ffe80f20d45e6be81fbec7683d6c9a15cd421e6e8b340"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0535d5b57d014d06ceeaeffd816bb3a6e2dddeb670222570b8c4953e2d2ea678"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af55cc207865d641a57f7044e98b08b09220da3d1b13a46f26487cc2f898a072"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-win32.whl", hash = "sha256:7af40425ac535cbda129d9915edcaa002afe35d84609fd3b9d6a8c46732e02ee"}, - {file = "SQLAlchemy-1.4.51-cp310-cp310-win_amd64.whl", hash = "sha256:8d1d7d63e5d2f4e92a39ae1e897a5d551720179bb8d1254883e7113d3826d43c"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eaeeb2464019765bc4340214fca1143081d49972864773f3f1e95dba5c7edc7d"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7deeae5071930abb3669b5185abb6c33ddfd2398f87660fafdb9e6a5fb0f3f2f"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0892e7ac8bc76da499ad3ee8de8da4d7905a3110b952e2a35a940dab1ffa550e"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-win32.whl", hash = "sha256:50e074aea505f4427151c286955ea025f51752fa42f9939749336672e0674c81"}, - {file = "SQLAlchemy-1.4.51-cp311-cp311-win_amd64.whl", hash = "sha256:3b0cd89a7bd03f57ae58263d0f828a072d1b440c8c2949f38f3b446148321171"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a33cb3f095e7d776ec76e79d92d83117438b6153510770fcd57b9c96f9ef623d"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cacc0b2dd7d22a918a9642fc89840a5d3cee18a0e1fe41080b1141b23b10916"}, - {file = 
"SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245c67c88e63f1523e9216cad6ba3107dea2d3ee19adc359597a628afcabfbcb"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-win32.whl", hash = "sha256:8e702e7489f39375601c7ea5a0bef207256828a2bc5986c65cb15cd0cf097a87"}, - {file = "SQLAlchemy-1.4.51-cp312-cp312-win_amd64.whl", hash = "sha256:0525c4905b4b52d8ccc3c203c9d7ab2a80329ffa077d4bacf31aefda7604dc65"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:1980e6eb6c9be49ea8f89889989127daafc43f0b1b6843d71efab1514973cca0"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ec7a0ed9b32afdf337172678a4a0e6419775ba4e649b66f49415615fa47efbd"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352df882088a55293f621328ec33b6ffca936ad7f23013b22520542e1ab6ad1b"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:86a22143a4001f53bf58027b044da1fb10d67b62a785fc1390b5c7f089d9838c"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c37bc677690fd33932182b85d37433845de612962ed080c3e4d92f758d1bd894"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-win32.whl", hash = "sha256:d0a83afab5e062abffcdcbcc74f9d3ba37b2385294dd0927ad65fc6ebe04e054"}, - {file = "SQLAlchemy-1.4.51-cp36-cp36m-win_amd64.whl", hash = "sha256:a61184c7289146c8cff06b6b41807c6994c6d437278e72cf00ff7fe1c7a263d1"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:3f0ef620ecbab46e81035cf3dedfb412a7da35340500ba470f9ce43a1e6c423b"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c55040d8ea65414de7c47f1a23823cd9f3fad0dc93e6b6b728fee81230f817b"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ef80328e3fee2be0a1abe3fe9445d3a2e52a1282ba342d0dab6edf1fef4707"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f8cafa6f885a0ff5e39efa9325195217bb47d5929ab0051636610d24aef45ade"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f2df79a46e130235bc5e1bbef4de0583fb19d481eaa0bffa76e8347ea45ec6"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-win32.whl", hash = "sha256:f2e5b6f5cf7c18df66d082604a1d9c7a2d18f7d1dbe9514a2afaccbb51cc4fc3"}, - {file = "SQLAlchemy-1.4.51-cp37-cp37m-win_amd64.whl", hash = "sha256:5e180fff133d21a800c4f050733d59340f40d42364fcb9d14f6a67764bdc48d2"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7d8139ca0b9f93890ab899da678816518af74312bb8cd71fb721436a93a93298"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb18549b770351b54e1ab5da37d22bc530b8bfe2ee31e22b9ebe650640d2ef12"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:55e699466106d09f028ab78d3c2e1f621b5ef2c8694598242259e4515715da7c"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2ad16880ccd971ac8e570550fbdef1385e094b022d6fc85ef3ce7df400dddad3"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b97fd5bb6b7c1a64b7ac0632f7ce389b8ab362e7bd5f60654c2a418496be5d7f"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-win32.whl", hash = "sha256:cecb66492440ae8592797dd705a0cbaa6abe0555f4fa6c5f40b078bd2740fc6b"}, - {file = "SQLAlchemy-1.4.51-cp38-cp38-win_amd64.whl", hash = "sha256:39b02b645632c5fe46b8dd30755682f629ffbb62ff317ecc14c998c21b2896ff"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b03850c290c765b87102959ea53299dc9addf76ca08a06ea98383348ae205c99"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e646b19f47d655261b22df9976e572f588185279970efba3d45c377127d35349"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3cf56cc36d42908495760b223ca9c2c0f9f0002b4eddc994b24db5fcb86a9e4"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0d661cff58c91726c601cc0ee626bf167b20cc4d7941c93c5f3ac28dc34ddbea"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3823dda635988e6744d4417e13f2e2b5fe76c4bf29dd67e95f98717e1b094cad"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-win32.whl", hash = "sha256:b00cf0471888823b7a9f722c6c41eb6985cf34f077edcf62695ac4bed6ec01ee"}, - {file = "SQLAlchemy-1.4.51-cp39-cp39-win_amd64.whl", hash = "sha256:a055ba17f4675aadcda3005df2e28a86feb731fdcc865e1f6b4f209ed1225cba"}, - {file = "SQLAlchemy-1.4.51.tar.gz", hash = "sha256:e7908c2025eb18394e32d65dd02d2e37e17d733cdbe7d78231c2b6d7eb20cdb9"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, + {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, ] [package.dependencies] @@ -3839,13 +3797,13 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. 
[[package]] name = "types-boto" -version = "2.49.18.9" +version = "2.49.18.20240205" description = "Typing stubs for boto" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-boto-2.49.18.9.tar.gz", hash = "sha256:fe711d938c237be50346a1bdc2231d3170453fe734789075dd088458e4e9442d"}, - {file = "types_boto-2.49.18.9-py3-none-any.whl", hash = "sha256:b44e8aead5e34bc336a813af90fdbb9ac5bb1091de839042628163463d9948eb"}, + {file = "types-boto-2.49.18.20240205.tar.gz", hash = "sha256:6c7f3945e5759e1f8a760e2843adbeb1eea64f869f3a3070af7cfcfc25ea71bd"}, + {file = "types_boto-2.49.18.20240205-py3-none-any.whl", hash = "sha256:9873214ce37756a6145c165fb9beaf80cb4ac1df5a5967f6a0945109c8c4469a"}, ] [[package]] @@ -3897,13 +3855,13 @@ files = [ [[package]] name = "types-redis" -version = "4.6.0.20240106" +version = "4.6.0.20240425" description = "Typing stubs for redis" optional = false python-versions = ">=3.8" files = [ - {file = "types-redis-4.6.0.20240106.tar.gz", hash = "sha256:2b2fa3a78f84559616242d23f86de5f4130dfd6c3b83fb2d8ce3329e503f756e"}, - {file = "types_redis-4.6.0.20240106-py3-none-any.whl", hash = "sha256:912de6507b631934bd225cdac310b04a58def94391003ba83939e5a10e99568d"}, + {file = "types-redis-4.6.0.20240425.tar.gz", hash = "sha256:9402a10ee931d241fdfcc04592ebf7a661d7bb92a8dea631279f0d8acbcf3a22"}, + {file = "types_redis-4.6.0.20240425-py3-none-any.whl", hash = "sha256:ac5bc19e8f5997b9e76ad5d9cf15d0392d9f28cf5fc7746ea4a64b989c45c6a8"}, ] [package.dependencies] @@ -3912,13 +3870,13 @@ types-pyOpenSSL = "*" [[package]] name = "types-requests" -version = "2.31.0.20240106" +version = "2.31.0.20240406" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.31.0.20240106.tar.gz", hash = "sha256:0e1c731c17f33618ec58e022b614a1a2ecc25f7dc86800b36ef341380402c612"}, - {file = "types_requests-2.31.0.20240106-py3-none-any.whl", hash = "sha256:da997b3b6a72cc08d09f4dba9802fdbabc89104b35fe24ee588e674037689354"}, + {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, + {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, ] [package.dependencies] @@ -4009,13 +3967,13 @@ files = [ [[package]] name = "werkzeug" -version = "2.3.7" +version = "3.0.3" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-2.3.7-py3-none-any.whl", hash = "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"}, - {file = "werkzeug-2.3.7.tar.gz", hash = "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"}, + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, ] [package.dependencies] @@ -4239,4 +4197,5 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "~3.10.9" -content-hash = "2542cad11c77bc4f2bd7dbd9e1fbb1f8595a44823a4212bcb69127ded97d31c2" +content-hash = "d248dd0bd87785c37d04487401d2fd432fd03c296223986d5f2c34f3e57b5275" + diff --git a/pyproject.toml b/pyproject.toml index 7b033d1b5c..07008e9a0e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ Flask-SQLAlchemy = { git = "https://github.com/pallets-eco/flask-sqlalchemy.git" Flask = "2.3.3" click-datetime = "0.2" gevent = "23.9.1" + gunicorn = "20.1.0" iso8601 = "2.1.0" jsonschema = "3.2.0" @@ -45,8 +46,9 @@ psycopg2-binary = "2.9.9" PyJWT = "2.8.0" pytz = "2021.3" PyYAML = "6.0.1" -SQLAlchemy = "1.4.51" + cachelib = "0.12.0" +SQLAlchemy = "1.4.52" newrelic = "6.10.0.165" notifications-python-client = "6.4.1" python-dotenv = "1.0.1" @@ -59,11 +61,11 @@ more-itertools = "8.14.0" awscli-cwlogs = "1.4.6" aws-embedded-metrics = "1.0.8" # Putting upgrade on hold due to new version introducing breaking changes -Werkzeug = "2.3.7" -MarkupSafe = "2.1.4" +Werkzeug = "3.0.3" +MarkupSafe = "2.1.5" # REVIEW: v2 is using sha512 instead of sha1 by default (in v1) itsdangerous = "2.1.2" -notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.1.4" } +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.2.6" } # rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 typing-extensions = "4.10.0" greenlet = "2.0.2" @@ -103,5 +105,5 @@ types-boto = "2.49.18.9" types-mock = "4.0.15.2" types-python-dateutil = "2.8.19.20240106" types-pytz = "2022.7.1.2" -types-requests = "2.31.0.20240106" -types-redis = "4.6.0.20240106" +types-requests = "2.31.0.20240406" +types-redis = "4.6.0.20240425" diff --git a/scripts/callManifestsRollout.sh b/scripts/callManifestsRollout.sh new file mode 100755 index 0000000000..29229ea093 --- /dev/null +++ b/scripts/callManifestsRollout.sh @@ -0,0 +1,19 @@ +#!/bin/bash +# Dispatches the notification-manifests workflow (api-rollout-k8s-staging.yaml) that rolls the given docker SHA out to staging. +# Usage: WORKFLOW_PAT=<GitHub PAT able to dispatch workflows> ./callManifestsRollout.sh <docker_sha> +GITHUB_SHA=$1 +PAYLOAD="{\"ref\":\"main\",\"inputs\":{\"docker_sha\":\"$GITHUB_SHA\"}}" + + +RESPONSE=$(curl -w '%{http_code}\n' \ + -o /dev/null -s \ + -L -X POST -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $WORKFLOW_PAT" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/cds-snc/notification-manifests/actions/workflows/api-rollout-k8s-staging.yaml/dispatches \ + -d "$PAYLOAD") + +if [ "$RESPONSE" != 204 ]; then + echo "ERROR CALLING MANIFESTS ROLLOUT: HTTP RESPONSE: $RESPONSE" + exit 1 +fi diff --git a/scripts/run_celery.ps1 b/scripts/run_celery.ps1 index 724b47766e..b35cb71e94 100644 --- a/scripts/run_celery.ps1 +++ b/scripts/run_celery.ps1 @@ -1,3 +1,3 @@ $ENV:FORKED_BY_MULTIPROCESSING=1 -celery --app run_celery worker --pidfile="$env:TEMP\celery.pid" --pool=solo --loglevel=DEBUG --concurrency=1 -Q 
"database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low,send-email-tasks,service-callbacks,delivery-receipts" +celery --app run_celery worker --pidfile="$env:TEMP\celery.pid" --pool=solo --loglevel=DEBUG --concurrency=1 -Q "database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-high,send-sms-medium,send-sms-low,service-callbacks,service-callbacks-retry,delivery-receipts" diff --git a/scripts/run_celery.sh b/scripts/run_celery.sh index 99f09ac9bb..6d83d67054 100755 --- a/scripts/run_celery.sh +++ b/scripts/run_celery.sh @@ -6,4 +6,4 @@ set -e echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" -celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low,send-email-tasks,service-callbacks,delivery-receipts +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-high,send-sms-medium,send-sms-low,service-callbacks,service-callbacks-retry,delivery-receipts diff --git a/scripts/run_celery_core_tasks.sh b/scripts/run_celery_core_tasks.sh index e109bce78d..060af2ad37 100755 --- a/scripts/run_celery_core_tasks.sh +++ b/scripts/run_celery_core_tasks.sh @@ -7,4 +7,4 @@ set -e echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" -celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,service-callbacks,delivery-receipts +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,service-callbacks,service-callbacks-retry,delivery-receipts diff --git a/scripts/run_celery_local.sh b/scripts/run_celery_local.sh index d9f439b8f9..9eb29f2658 100755 --- a/scripts/run_celery_local.sh +++ b/scripts/run_celery_local.sh @@ -7,4 +7,4 @@ set -e echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" -celery -A run_celery.notify_celery worker --beat --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q 
database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low,send-throttled-sms-tasks,send-email-high,send-email-medium,send-email-low,send-email-tasks,service-callbacks,delivery-receipts +celery -A run_celery.notify_celery worker --beat --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-high,send-sms-medium,send-sms-low,send-throttled-sms-tasks,send-email-high,send-email-medium,send-email-low,service-callbacks,service-callbacks-retry,delivery-receipts diff --git a/scripts/run_celery_no_sms_sending.sh b/scripts/run_celery_no_sms_sending.sh index cebefa7435..53546088b1 100755 --- a/scripts/run_celery_no_sms_sending.sh +++ b/scripts/run_celery_no_sms_sending.sh @@ -3,7 +3,7 @@ set -e # Runs celery with all celery queues except send-throttled-sms-tasks, -# send-sms-tasks, send-sms-high, send-sms-medium, or send-sms-low. +# send-sms-high, send-sms-medium, or send-sms-low. # Check and see if this is running in K8s and if so, wait for cloudwatch agent if [ -n "${STATSD_HOST}" ]; then @@ -28,4 +28,4 @@ fi echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" -celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-email-tasks,send-email-high,send-email-medium,send-email-low,service-callbacks,delivery-receipts +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-email-high,send-email-medium,send-email-low,service-callbacks,service-callbacks-retry,delivery-receipts diff --git a/scripts/run_celery_send_email.sh b/scripts/run_celery_send_email.sh index 98fda14a68..29c6039f09 100755 --- a/scripts/run_celery_send_email.sh +++ b/scripts/run_celery_send_email.sh @@ -6,5 +6,4 @@ set -e echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" -# TODO: we shouldn't be using the send-email-tasks queue anymore, once we verify this we can remove it -celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-email-tasks,send-email-high,send-email-medium,send-email-low +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-email-high,send-email-medium,send-email-low diff --git a/scripts/run_celery_send_sms.sh b/scripts/run_celery_send_sms.sh index 5f7865b62c..7aee759338 100755 --- a/scripts/run_celery_send_sms.sh +++ b/scripts/run_celery_send_sms.sh @@ -6,5 +6,4 @@ set -e echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" -# TODO: we shouldn't be using the send-sms-tasks 
queue anymore - once we verify this we can remove it -celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-sms-high,send-sms-medium,send-sms-low diff --git a/tests-perf/locust/README.md b/tests-perf/locust/README.md index 561adafdd2..afe4c639f0 100644 --- a/tests-perf/locust/README.md +++ b/tests-perf/locust/README.md @@ -36,7 +36,7 @@ You should not have to modify the configuration to run the stress-tests locally. There are two ways to run Locust, with the UI or headless. -### Add the following to your .env file (ask a coworker): +### Add the following to your .env file (see 1Password): ``` PERF_TEST_AUTH_HEADER = @@ -67,6 +67,20 @@ locust -f .\locust-notifications.py --headless --users=5500 --spawn-rate=200 --r You can also modify the *locust.conf* file to enable the headless mode and define the necessary users, spawn rate and run time. +## Email send rate test + +We also max out the email send rate: 5 users each send a 2000-email bulk job every minute, for 10 minutes. This can be run manually with the command: +``` +locust --headless --host https://api.staging.notification.cdssandbox.xyz --locustfile tests-perf/locust/send_rate_email.py --users 5 --run-time 10m --spawn-rate 1 +```
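+
+The test reads its configuration from environment variables. A minimal `.env` sketch for this test (the variable names come from `send_rate_email.py`; the values are placeholders to fill in):
+```
+PERF_TEST_AUTH_HEADER=<api key authorization header>
+PERF_TEST_BULK_EMAIL_TEMPLATE_ID=<bulk email template id>
+PERF_TEST_EMAIL=success@simulator.amazonses.com
+```
+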
 ### Performance Testing on AWS  We run Notify performance tests daily through AWS ECS tasks diff --git a/tests-perf/locust/locust.conf b/tests-perf/locust/locust.conf index c1eba3b220..76aa3d2273 100644 --- a/tests-perf/locust/locust.conf +++ b/tests-perf/locust/locust.conf @@ -3,7 +3,7 @@ locustfile = tests-perf/locust/locust-notifications.py host = https://api.staging.notification.cdssandbox.xyz users = 3000 spawn-rate = 20 -run-time = 5m +run-time = 10m # headless = true # master = true diff --git a/tests-perf/locust/send_rate_email.py b/tests-perf/locust/send_rate_email.py new file mode 100644 index 0000000000..a2a26fef73 --- /dev/null +++ b/tests-perf/locust/send_rate_email.py @@ -0,0 +1,60 @@ +""" send_rate_email.py + isort:skip_file """ +# flake8: noqa + +BULK_EMAIL_SIZE = 2000 + +import os +import sys +from datetime import datetime +from dataclasses import make_dataclass + +sys.path.append(os.path.abspath(os.path.join("..", "tests_smoke"))) + +from dotenv import load_dotenv +from locust import HttpUser, constant_pacing, task +from tests_smoke.smoke.common import job_line, rows_to_csv # type: ignore + +load_dotenv() +NotifyApiUserTemplateGroup = make_dataclass('NotifyApiUserTemplateGroup', [ + 'bulk_email_id', + 'email_id', + 'email_with_attachment_id', + 'email_with_link_id', + 'sms_id', +]) + + +class NotifyApiUser(HttpUser): + + wait_time = constant_pacing(60)  # 60 seconds between each task + host = os.getenv("PERF_TEST_DOMAIN", "https://api.staging.notification.cdssandbox.xyz") + + def __init__(self, *args, **kwargs): + super(NotifyApiUser, self).__init__(*args, **kwargs) + + self.headers = {"Authorization": os.getenv("PERF_TEST_AUTH_HEADER")} + self.email = os.getenv("PERF_TEST_EMAIL", "success@simulator.amazonses.com") + self.phone_number = os.getenv("PERF_TEST_PHONE_NUMBER", "16135550123") + self.template_group = NotifyApiUserTemplateGroup( + bulk_email_id=os.getenv("PERF_TEST_BULK_EMAIL_TEMPLATE_ID"), + email_id=os.getenv("PERF_TEST_EMAIL_TEMPLATE_ID"), email_with_attachment_id=os.getenv("PERF_TEST_EMAIL_WITH_ATTACHMENT_TEMPLATE_ID"), + email_with_link_id=os.getenv("PERF_TEST_EMAIL_WITH_LINK_TEMPLATE_ID"), + sms_id=os.getenv("PERF_TEST_SMS_TEMPLATE_ID"), + ) + + @task(1) + def send_bulk_email_notifications(self): + """ + Send BULK_EMAIL_SIZE emails through the /bulk endpoint + """ + + json = { + "name": f"Send rate test {datetime.utcnow().isoformat()}", + "template_id": self.template_group.bulk_email_id, + "csv": rows_to_csv([["email address", "application_file"], *job_line(self.email, BULK_EMAIL_SIZE)]) + } + + self.client.post("/v2/notifications/bulk", json=json, headers=self.headers) diff --git a/tests/app/api_key/test_rest.py b/tests/app/api_key/test_rest.py index a28985884a..d236f8cd8b 100644 --- a/tests/app/api_key/test_rest.py +++ b/tests/app/api_key/test_rest.py @@ -81,6 +81,9 @@ def test_revoke_api_keys_with_valid_auth_revokes_and_notifies_user(self, client, api_key_1 = create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name="Key 1") unsigned_secret = get_unsigned_secret(api_key_1.id) + # Create token expected from the frontend + unsigned_secret = f"gcntfy-keyname-{service.id}-{unsigned_secret}" + sre_auth_header = create_sre_authorization_header() response = client.post( url_for("sre_tools.revoke_api_keys"), @@ -89,7 +92,7 @@ def test_revoke_api_keys_with_valid_auth_revokes_and_notifies_user(self, client, ) # Get api key from DB - api_key_1 = get_api_key_by_secret(api_key_1.secret) + api_key_1 = get_api_key_by_secret(unsigned_secret) assert response.status_code == 201 assert api_key_1.expiry_date is not None assert api_key_1.compromised_key_info["type"] == "cds-tester" diff --git a/tests/app/authentication/test_authentication.py b/tests/app/authentication/test_authentication.py index 16299507b9..40615fc6a6 100644 --- a/tests/app/authentication/test_authentication.py +++ b/tests/app/authentication/test_authentication.py @@ -135,18 +135,28 @@ def test_admin_auth_should_not_allow_api_key_scheme(client, sample_api_key): @pytest.mark.parametrize("scheme", ["ApiKey-v1", "apikey-v1", "APIKEY-V1"]) def test_should_allow_auth_with_api_key_scheme(client, sample_api_key, scheme): api_key_secret = get_unsigned_secret(sample_api_key.id) + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{api_key_secret}" + response = client.get("/notifications", headers={"Authorization": f"{scheme} {unsigned_secret}"}) + + assert response.status_code == 200 - response = client.get("/notifications", headers={"Authorization": f"{scheme} {api_key_secret}"}) + +def test_should_allow_auth_with_api_key_scheme_and_extra_spaces(client, sample_api_key): + api_key_secret = get_unsigned_secret(sample_api_key.id) + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{api_key_secret}" + response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1    {unsigned_secret}"}) assert response.status_code == 200 -def test_should_allow_auth_with_api_key_scheme_36_chars_or_longer(client, sample_api_key): +def test_should_NOT_allow_auth_with_api_key_scheme_with_incorrect_format(client, sample_api_key): api_key_secret = "fhsdkjhfdsfhsd" + get_unsigned_secret(sample_api_key.id) response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1 {api_key_secret}"}) - assert response.status_code == 200 + assert response.status_code == 403 + error_message = json.loads(response.get_data()) + assert error_message["message"] == {"token": ["Invalid token: Enter your full API key"]} def test_should_not_allow_invalid_api_key(client, 
sample_api_key): @@ -154,7 +164,7 @@ def test_should_not_allow_invalid_api_key(client, sample_api_key): assert response.status_code == 403 error_message = json.loads(response.get_data()) - assert error_message["message"] == {"token": ["Invalid token: API key not found"]} + assert error_message["message"] == {"token": ["Invalid token: Enter your full API key"]} def test_should_not_allow_expired_api_key(client, sample_api_key): @@ -162,7 +172,9 @@ def test_should_not_allow_expired_api_key(client, sample_api_key): expire_api_key(service_id=sample_api_key.service_id, api_key_id=sample_api_key.id) - response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1 {api_key_secret}"}) + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{api_key_secret}" + + response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1 {unsigned_secret}"}) assert response.status_code == 403 error_message = json.loads(response.get_data()) diff --git a/tests/app/aws/test_s3.py b/tests/app/aws/test_s3.py index bae56c3f45..02de33cbba 100644 --- a/tests/app/aws/test_s3.py +++ b/tests/app/aws/test_s3.py @@ -1,6 +1,6 @@ import uuid from datetime import datetime, timedelta -from unittest.mock import call +from unittest.mock import Mock, call import pytest import pytz @@ -12,6 +12,7 @@ get_list_of_files_by_suffix, get_s3_bucket_objects, get_s3_file, + remove_jobs_from_s3, remove_transformed_dvla_file, upload_job_to_s3, ) @@ -214,3 +215,30 @@ def test_upload_job_to_s3(notify_api, mocker): bucket_name=current_app.config["CSV_UPLOAD_BUCKET_NAME"], file_location=f"service-{service_id}-notify/{upload_id}.csv", ) + + +def test_remove_jobs_from_s3(notify_api, mocker): + mock = Mock() + mocker.patch("app.aws.s3.resource", return_value=mock) + jobs = [ + type("Job", (object,), {"service_id": "foo", "id": "j1"}), + type("Job", (object,), {"service_id": "foo", "id": "j2"}), + type("Job", (object,), {"service_id": "foo", "id": "j3"}), + type("Job", (object,), {"service_id": "foo", "id": "j4"}), + type("Job", (object,), {"service_id": "foo", "id": "j5"}), + ] + + remove_jobs_from_s3(jobs, batch_size=2) + + mock.assert_has_calls( + [ + call.Bucket(current_app.config["CSV_UPLOAD_BUCKET_NAME"]), + call.Bucket().delete_objects( + Delete={"Objects": [{"Key": "service-foo-notify/j1.csv"}, {"Key": "service-foo-notify/j2.csv"}]} + ), + call.Bucket().delete_objects( + Delete={"Objects": [{"Key": "service-foo-notify/j3.csv"}, {"Key": "service-foo-notify/j4.csv"}]} + ), + call.Bucket().delete_objects(Delete={"Objects": [{"Key": "service-foo-notify/j5.csv"}]}), + ] + ) diff --git a/tests/app/celery/test_letters_pdf_tasks.py b/tests/app/celery/test_letters_pdf_tasks.py deleted file mode 100644 index 34eb4589c8..0000000000 --- a/tests/app/celery/test_letters_pdf_tasks.py +++ /dev/null @@ -1,923 +0,0 @@ -import base64 -from unittest.mock import ANY, Mock, call - -import boto3 -import pytest -import requests_mock -from botocore.exceptions import ClientError -from flask import current_app -from freezegun import freeze_time -from moto import mock_s3 -from PyPDF2.utils import PdfReadError -from requests import RequestException -from sqlalchemy.orm.exc import NoResultFound - -from app.celery.letters_pdf_tasks import ( - _move_invalid_letter_and_update_status, - _sanitise_precompiled_pdf, - collate_letter_pdfs_for_day, - create_letters_pdf, - get_letters_pdf, - group_letters, - letter_in_created_state, - process_virus_scan_error, - process_virus_scan_failed, - process_virus_scan_passed, - replay_letters_in_error, 
-) -from app.errors import VirusScanError -from app.letters.utils import ScanErrorType -from app.models import ( - KEY_TYPE_NORMAL, - KEY_TYPE_TEST, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_PENDING_VIRUS_CHECK, - NOTIFICATION_SENDING, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_VALIDATION_FAILED, - NOTIFICATION_VIRUS_SCAN_FAILED, - Notification, -) -from celery.exceptions import MaxRetriesExceededError, Retry -from tests.app.db import create_letter_branding, create_notification, save_notification -from tests.conftest import set_config_values - - -@pytest.mark.skip(reason="Letter tests") -def test_should_have_decorated_tasks_functions(): - assert create_letters_pdf.__wrapped__.__name__ == "create_letters_pdf" - assert collate_letter_pdfs_for_day.__wrapped__.__name__ == "collate_letter_pdfs_for_day" - assert process_virus_scan_passed.__wrapped__.__name__ == "process_virus_scan_passed" - assert process_virus_scan_failed.__wrapped__.__name__ == "process_virus_scan_failed" - assert process_virus_scan_error.__wrapped__.__name__ == "process_virus_scan_error" - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize("personalisation", [{"name": "test"}, None]) -def test_get_letters_pdf_calls_notifications_template_preview_service_correctly( - notify_api, mocker, client, sample_letter_template, personalisation -): - contact_block = "Mr Foo,\n1 Test Street,\nLondon\nN1" - filename = "opg" - - with set_config_values( - notify_api, - { - "TEMPLATE_PREVIEW_API_HOST": "http://localhost/notifications-template-preview", - "TEMPLATE_PREVIEW_API_KEY": "test-key", - }, - ): - with requests_mock.Mocker() as request_mock: - mock_post = request_mock.post( - "http://localhost/notifications-template-preview/print.pdf", - content=b"\x00\x01", - status_code=200, - ) - - get_letters_pdf( - sample_letter_template, - contact_block=contact_block, - filename=filename, - values=personalisation, - ) - - assert mock_post.last_request.json() == { - "values": personalisation, - "letter_contact_block": contact_block, - "filename": filename, - "template": { - "subject": sample_letter_template.subject, - "content": sample_letter_template.content, - }, - } - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize("page_count,expected_billable_units", [("1", 1), ("2", 1), ("3", 2)]) -def test_get_letters_pdf_calculates_billing_units( - notify_api, - mocker, - client, - sample_letter_template, - page_count, - expected_billable_units, -): - contact_block = "Mr Foo,\n1 Test Street,\nLondon\nN1" - filename = "opg" - - with set_config_values( - notify_api, - { - "TEMPLATE_PREVIEW_API_HOST": "http://localhost/notifications-template-preview", - "TEMPLATE_PREVIEW_API_KEY": "test-key", - }, - ): - with requests_mock.Mocker() as request_mock: - request_mock.post( - "http://localhost/notifications-template-preview/print.pdf", - content=b"\x00\x01", - headers={"X-pdf-page-count": page_count}, - status_code=200, - ) - - _, billable_units = get_letters_pdf( - sample_letter_template, - contact_block=contact_block, - filename=filename, - values=None, - ) - - assert billable_units == expected_billable_units - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-12-04 17:31:00") -def test_create_letters_pdf_calls_s3upload(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", "1")) - mock_s3 = mocker.patch("app.letters.utils.s3upload") - - create_letters_pdf(sample_letter_notification.id) - - 
mock_s3.assert_called_with( - bucket_name=current_app.config["LETTERS_PDF_BUCKET_NAME"], - file_location="2017-12-04/NOTIFY.FOO.D.2.C.C.20171204173100.PDF", - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-12-04 17:31:00") -def test_create_letters_pdf_calls_s3upload_for_test_letters(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", "1")) - mock_s3 = mocker.patch("app.letters.utils.s3upload") - sample_letter_notification.key_type = "test" - - create_letters_pdf(sample_letter_notification.id) - - mock_s3.assert_called_with( - bucket_name=current_app.config["TEST_LETTERS_BUCKET_NAME"], - file_location="NOTIFY.FOO.D.2.C.C.20171204173100.PDF", - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_sets_billable_units(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - mocker.patch("app.letters.utils.s3upload") - - create_letters_pdf(sample_letter_notification.id) - noti = Notification.query.filter(Notification.reference == sample_letter_notification.reference).one() - assert noti.billable_units == 1 - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_non_existent_notification(notify_api, mocker, fake_uuid): - with pytest.raises(expected_exception=NoResultFound): - create_letters_pdf(fake_uuid) - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_handles_request_errors(mocker, sample_letter_notification): - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", side_effect=RequestException) - mock_retry = mocker.patch("app.celery.letters_pdf_tasks.create_letters_pdf.retry") - - create_letters_pdf(sample_letter_notification.id) - - assert mock_get_letters_pdf.called - assert mock_retry.called - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_handles_s3_errors(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - error_response = { - "Error": { - "Code": "InvalidParameterValue", - "Message": "some error message from amazon", - "Type": "Sender", - } - } - mock_s3 = mocker.patch( - "app.letters.utils.s3upload", - side_effect=ClientError(error_response, "operation_name"), - ) - mock_retry = mocker.patch("app.celery.letters_pdf_tasks.create_letters_pdf.retry") - - create_letters_pdf(sample_letter_notification.id) - - assert mock_s3.called - assert mock_retry.called - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_pdf_sets_technical_failure_max_retries(mocker, sample_letter_notification): - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", side_effect=RequestException) - mock_retry = mocker.patch( - "app.celery.letters_pdf_tasks.create_letters_pdf.retry", - side_effect=MaxRetriesExceededError, - ) - mock_update_noti = mocker.patch("app.celery.letters_pdf_tasks.update_notification_status_by_id") - - create_letters_pdf(sample_letter_notification.id) - - assert mock_get_letters_pdf.called - assert mock_retry.called - mock_update_noti.assert_called_once_with(sample_letter_notification.id, "technical-failure") - - -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_gets_the_right_logo_when_service_has_no_logo(notify_api, mocker, 
sample_letter_notification): - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - mocker.patch("app.letters.utils.s3upload") - mocker.patch("app.celery.letters_pdf_tasks.update_notification_status_by_id") - - create_letters_pdf(sample_letter_notification.id) - mock_get_letters_pdf.assert_called_once_with( - sample_letter_notification.template, - contact_block=sample_letter_notification.reply_to_text, - filename=None, - values=sample_letter_notification.personalisation, - ) - - -# We only need this while we are migrating to the new letter_branding model -@pytest.mark.skip(reason="Letter tests") -def test_create_letters_gets_the_right_logo_when_service_has_letter_branding_logo(notify_api, mocker, sample_letter_notification): - letter_branding = create_letter_branding(name="test brand", filename="test-brand") - sample_letter_notification.service.letter_branding = letter_branding - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - mocker.patch("app.letters.utils.s3upload") - mocker.patch("app.celery.letters_pdf_tasks.update_notification_status_by_id") - - create_letters_pdf(sample_letter_notification.id) - mock_get_letters_pdf.assert_called_once_with( - sample_letter_notification.template, - contact_block=sample_letter_notification.reply_to_text, - filename=sample_letter_notification.service.letter_branding.filename, - values=sample_letter_notification.personalisation, - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_collate_letter_pdfs_for_day(notify_api, mocker): - mock_s3 = mocker.patch( - "app.celery.tasks.s3.get_s3_bucket_objects", - return_value=[ - {"Key": "B.pDf", "Size": 2}, - {"Key": "A.PDF", "Size": 1}, - {"Key": "C.pdf", "Size": 3}, - ], - ) - mock_group_letters = mocker.patch( - "app.celery.letters_pdf_tasks.group_letters", - return_value=[ - [{"Key": "A.PDF", "Size": 1}, {"Key": "B.pDf", "Size": 2}], - [{"Key": "C.pdf", "Size": 3}], - ], - ) - mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task") - - collate_letter_pdfs_for_day("2017-01-02") - - mock_s3.assert_called_once_with("test-letters-pdf", subfolder="2017-01-02") - mock_group_letters.assert_called_once_with(sorted(mock_s3.return_value, key=lambda x: x["Key"])) - assert mock_celery.call_args_list[0] == call( - name="zip-and-send-letter-pdfs", - kwargs={ - "filenames_to_zip": ["A.PDF", "B.pDf"], - "upload_filename": "NOTIFY.2017-01-02.001.oqdjIM2-NAUU9Sm5Slmi.ZIP", - }, - queue="process-ftp-tasks", - compression="zlib", - ) - assert mock_celery.call_args_list[1] == call( - name="zip-and-send-letter-pdfs", - kwargs={ - "filenames_to_zip": ["C.pdf"], - "upload_filename": "NOTIFY.2017-01-02.002.tdr7hcdPieiqjkVoS4kU.ZIP", - }, - queue="process-ftp-tasks", - compression="zlib", - ) - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-09-12 17:50:00") -def test_collate_letter_pdfs_for_day_works_without_date_param(notify_api, mocker): - mock_s3 = mocker.patch("app.celery.tasks.s3.get_s3_bucket_objects") - collate_letter_pdfs_for_day() - expected_date = "2018-09-12" - mock_s3.assert_called_once_with("test-letters-pdf", subfolder=expected_date) - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_splits_on_file_size(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [ - # ends under max but next one is too big - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", 
"Size": 2}, - # ends on exactly max - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - # exactly max goes in next file - {"Key": "F.pdf", "Size": 5}, - # if it's bigger than the max, still gets included - {"Key": "G.pdf", "Size": 6}, - # whatever's left goes in last list - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - - with set_config_values(notify_api, {"MAX_LETTER_PDF_ZIP_FILESIZE": 5}): - x = group_letters(letters) - - assert next(x) == [{"Key": "A.pdf", "Size": 1}, {"Key": "B.pdf", "Size": 2}] - assert next(x) == [ - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - ] - assert next(x) == [{"Key": "F.pdf", "Size": 5}] - assert next(x) == [{"Key": "G.pdf", "Size": 6}] - assert next(x) == [{"Key": "H.pdf", "Size": 1}, {"Key": "I.pdf", "Size": 1}] - # make sure iterator is exhausted - assert next(x, None) is None - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_splits_on_file_count(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [ - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - {"Key": "F.pdf", "Size": 5}, - {"Key": "G.pdf", "Size": 6}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - - with set_config_values(notify_api, {"MAX_LETTER_PDF_COUNT_PER_ZIP": 3}): - x = group_letters(letters) - - assert next(x) == [ - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - {"Key": "C.pdf", "Size": 3}, - ] - assert next(x) == [ - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - {"Key": "F.pdf", "Size": 5}, - ] - assert next(x) == [ - {"Key": "G.pdf", "Size": 6}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - # make sure iterator is exhausted - assert next(x, None) is None - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_splits_on_file_size_and_file_count(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [ - # ends under max file size but next file is too big - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - # ends on exactly max number of files and file size - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - # exactly max file size goes in next file - {"Key": "F.pdf", "Size": 5}, - # file size is within max but number of files reaches limit - {"Key": "G.pdf", "Size": 1}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - # whatever's left goes in last list - {"Key": "J.pdf", "Size": 1}, - ] - - with set_config_values( - notify_api, - {"MAX_LETTER_PDF_ZIP_FILESIZE": 5, "MAX_LETTER_PDF_COUNT_PER_ZIP": 3}, - ): - x = group_letters(letters) - - assert next(x) == [{"Key": "A.pdf", "Size": 1}, {"Key": "B.pdf", "Size": 2}] - assert next(x) == [ - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - ] - assert next(x) == [{"Key": "F.pdf", "Size": 5}] - assert next(x) == [ - {"Key": "G.pdf", "Size": 1}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - assert next(x) == [{"Key": "J.pdf", "Size": 1}] - # make sure iterator is exhausted - assert next(x, None) is None - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_ignores_non_pdfs(notify_api, mocker): - 
mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [{"Key": "A.zip"}] - assert list(group_letters(letters)) == [] - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_ignores_notifications_already_sent(notify_api, mocker): - mock = mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=False) - letters = [{"Key": "A.pdf"}] - assert list(group_letters(letters)) == [] - mock.assert_called_once_with("A.pdf") - - -@pytest.mark.skip(reason="Letter tests") -def test_group_letters_with_no_letters(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - assert list(group_letters([])) == [] - - -@pytest.mark.skip(reason="Letter tests") -def test_letter_in_created_state(sample_notification): - sample_notification.reference = "ABCDEF1234567890" - filename = "2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF" - - assert letter_in_created_state(filename) is True - - -@pytest.mark.skip(reason="Letter tests") -def test_letter_in_created_state_fails_if_notification_not_in_created( - sample_notification, -): - sample_notification.reference = "ABCDEF1234567890" - sample_notification.status = NOTIFICATION_SENDING - filename = "2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF" - assert letter_in_created_state(filename) is False - - -@pytest.mark.skip(reason="Letter tests") -def test_letter_in_created_state_fails_if_notification_doesnt_exist( - sample_notification, -): - sample_notification.reference = "QWERTY1234567890" - filename = "2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF" - assert letter_in_created_state(filename) is False - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize( - "key_type,noti_status,bucket_config_name,destination_folder", - [ - ( - KEY_TYPE_NORMAL, - NOTIFICATION_CREATED, - "LETTERS_PDF_BUCKET_NAME", - "2018-01-01/", - ), - (KEY_TYPE_TEST, NOTIFICATION_DELIVERED, "TEST_LETTERS_BUCKET_NAME", ""), - ], -) -def test_process_letter_task_check_virus_scan_passed( - sample_letter_template, - mocker, - key_type, - noti_status, - bucket_config_name, - destination_folder, -): - letter_notification = save_notification( - create_notification( - template=sample_letter_template, - billable_units=0, - status="pending-virus-check", - key_type=key_type, - reference="{} letter".format(key_type), - ) - ) - filename = "NOTIFY.{}".format(letter_notification.reference) - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config[bucket_config_name] - - conn = boto3.resource("s3") - conn.create_bucket(Bucket=source_bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"old_pdf") - - mock_get_page_count = mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=1) - mock_s3upload = mocker.patch("app.celery.letters_pdf_tasks.s3upload") - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - process_virus_scan_passed(filename) - - assert 
letter_notification.status == noti_status - assert letter_notification.billable_units == 1 - assert rmock.called - assert rmock.request_history[0].url == endpoint - - mock_s3upload.assert_called_once_with( - bucket_name=target_bucket_name, - filedata=b"new_pdf", - file_location=destination_folder + filename, - region="ca-central-1", - ) - mock_get_page_count.assert_called_once_with(b"old_pdf") - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize("key_type", [KEY_TYPE_NORMAL, KEY_TYPE_TEST]) -def test_process_letter_task_check_virus_scan_passed_when_sanitise_fails(sample_letter_notification, mocker, key_type): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config["INVALID_PDF_BUCKET_NAME"] - - conn = boto3.resource("s3") - conn.create_bucket(Bucket=source_bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"pdf_content") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - sample_letter_notification.key_type = key_type - mock_move_s3 = mocker.patch("app.letters.utils._move_s3_object") - mock_sanitise = mocker.patch("app.celery.letters_pdf_tasks._sanitise_precompiled_pdf", return_value=None) - mock_get_page_count = mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=2) - - process_virus_scan_passed(filename) - - assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED - assert sample_letter_notification.billable_units == 0 - mock_sanitise.assert_called_once_with(ANY, sample_letter_notification, b"pdf_content") - mock_move_s3.assert_called_once_with(source_bucket_name, filename, target_bucket_name, filename) - - mock_get_page_count.assert_called_once_with(b"pdf_content") - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize( - "key_type,notification_status,bucket_config_name", - [ - (KEY_TYPE_NORMAL, NOTIFICATION_CREATED, "LETTERS_PDF_BUCKET_NAME"), - (KEY_TYPE_TEST, NOTIFICATION_DELIVERED, "TEST_LETTERS_BUCKET_NAME"), - ], -) -def test_process_letter_task_check_virus_scan_passed_when_redaction_fails( - sample_letter_notification, - mocker, - key_type, - notification_status, - bucket_config_name, -): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config[bucket_config_name] - - conn = boto3.resource("s3") - conn.create_bucket(Bucket=bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - sample_letter_notification.key_type = key_type - mock_copy_s3 = mocker.patch("app.letters.utils._copy_s3_object") - mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=2) - - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "redaction_failed_message": "No matches for address block during redaction procedure", - "errors": { - "content_outside_of_printable_area": 
[], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - process_virus_scan_passed(filename) - - assert sample_letter_notification.billable_units == 2 - assert sample_letter_notification.status == notification_status - if key_type == KEY_TYPE_NORMAL: - mock_copy_s3.assert_called_once_with(bucket_name, filename, bucket_name, "REDACTION_FAILURE/" + filename) - else: - mock_copy_s3.assert_not_called() - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize("key_type", [KEY_TYPE_NORMAL, KEY_TYPE_TEST]) -def test_process_letter_task_check_virus_scan_passed_when_file_cannot_be_opened(sample_letter_notification, mocker, key_type): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config["INVALID_PDF_BUCKET_NAME"] - - conn = boto3.resource("s3") - conn.create_bucket(Bucket=source_bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"pdf_content") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - sample_letter_notification.key_type = key_type - mock_move_s3 = mocker.patch("app.letters.utils._move_s3_object") - - mock_get_page_count = mocker.patch("app.celery.letters_pdf_tasks.get_page_count", side_effect=PdfReadError) - mock_sanitise = mocker.patch("app.celery.letters_pdf_tasks._sanitise_precompiled_pdf") - - process_virus_scan_passed(filename) - - mock_sanitise.assert_not_called() - mock_get_page_count.assert_called_once_with(b"pdf_content") - mock_move_s3.assert_called_once_with(source_bucket_name, filename, target_bucket_name, filename) - assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED - assert sample_letter_notification.billable_units == 0 - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -def test_process_virus_scan_passed_logs_error_and_sets_tech_failure_if_s3_error_uploading_to_live_bucket( - mocker, - sample_letter_notification, -): - mock_logger = mocker.patch("app.celery.tasks.current_app.logger.exception") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - conn = boto3.resource("s3") - conn.create_bucket(Bucket=source_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"pdf_content") - - mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=1) - - error_response = { - "Error": { - "Code": "InvalidParameterValue", - "Message": "some error message from amazon", - "Type": "Sender", - } - } - mocker.patch( - "app.celery.letters_pdf_tasks._upload_pdf_to_test_or_live_pdf_bucket", - side_effect=ClientError(error_response, "operation_name"), - ) - - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - process_virus_scan_passed(filename) - - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE - 
mock_logger.assert_called_once_with( - "Error uploading letter to live pdf bucket for notification: {}".format(sample_letter_notification.id) - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_move_invalid_letter_and_update_status_logs_error_and_sets_tech_failure_state_if_s3_error( - mocker, - sample_letter_notification, -): - error_response = { - "Error": { - "Code": "InvalidParameterValue", - "Message": "some error message from amazon", - "Type": "Sender", - } - } - mocker.patch( - "app.celery.letters_pdf_tasks.move_scan_to_invalid_pdf_bucket", - side_effect=ClientError(error_response, "operation_name"), - ) - mock_logger = mocker.patch("app.celery.tasks.current_app.logger.exception") - - _move_invalid_letter_and_update_status(sample_letter_notification, "filename", mocker.Mock()) - - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE - mock_logger.assert_called_once_with( - "Error when moving letter with id {} to invalid PDF bucket".format(sample_letter_notification.id) - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_process_letter_task_check_virus_scan_failed(sample_letter_notification, mocker): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - mock_move_failed_pdf = mocker.patch("app.celery.letters_pdf_tasks.move_failed_pdf") - - with pytest.raises(VirusScanError) as e: - process_virus_scan_failed(filename) - - assert "Virus scan failed:" in str(e) - mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.FAILURE) - assert sample_letter_notification.status == NOTIFICATION_VIRUS_SCAN_FAILED - - -@pytest.mark.skip(reason="Letter tests") -def test_process_letter_task_check_virus_scan_error(sample_letter_notification, mocker): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - mock_move_failed_pdf = mocker.patch("app.celery.letters_pdf_tasks.move_failed_pdf") - - with pytest.raises(VirusScanError) as e: - process_virus_scan_error(filename) - - assert "Virus scan error:" in str(e.value) - mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.ERROR) - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE - - -@pytest.mark.skip(reason="Letter tests") -def test_replay_letters_in_error_for_all_letters_in_error_bucket(notify_api, mocker): - mockObject = boto3.resource("s3").Object("ERROR", "ERROR/file_name") - mocker.patch( - "app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", - return_value=[mockObject], - ) - mock_move = mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket") - mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task") - replay_letters_in_error() - mock_move.assert_called_once_with("file_name") - mock_celery.assert_called_once_with(name="scan-file", kwargs={"filename": "file_name"}, queue="antivirus-tasks") - - -@pytest.mark.skip(reason="Letter tests") -def test_replay_letters_in_error_for_one_file(notify_api, mocker): - mockObject = boto3.resource("s3").Object("ERROR", "ERROR/file_name") - mocker.patch( - "app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", - return_value=[mockObject], - ) - mock_move = mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket") - mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task") - replay_letters_in_error("file_name") - 
mock_move.assert_called_once_with("file_name") - mock_celery.assert_called_once_with(name="scan-file", kwargs={"filename": "file_name"}, queue="antivirus-tasks") - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_returns_data_from_template_preview(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - mock_celery = Mock(**{"retry.side_effect": Retry}) - response = _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - assert rmock.called - assert rmock.request_history[0].url == endpoint - - assert base64.b64decode(response.json()["file"].encode()) == b"new_pdf" - assert rmock.last_request.text == "old_pdf" - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_returns_none_on_validation_error(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"nyan").decode("utf-8"), - "validation_passed": False, - "errors": { - "content_outside_of_printable_area": [1], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=400, - ) - mock_celery = Mock(**{"retry.side_effect": Retry}) - response = _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - assert rmock.called - assert rmock.request_history[0].url == endpoint - - assert response is None - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_passes_the_service_id_and_notification_id_to_template_preview( - mocker, - sample_letter_notification, -): - tp_mock = mocker.patch("app.celery.letters_pdf_tasks.requests_post") - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - mock_celery = Mock(**{"retry.side_effect": Retry}) - _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - - service_id = str(sample_letter_notification.service_id) - notification_id = str(sample_letter_notification.id) - - tp_mock.assert_called_once_with( - "http://localhost:9999/precompiled/sanitise", - data=b"old_pdf", - headers={ - "Authorization": "Token my-secret-key", - "Service-ID": service_id, - "Notification-ID": notification_id, - }, - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_retries_on_http_error(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - rmock.post( - "http://localhost:9999/precompiled/sanitise", - content=b"new_pdf", - status_code=500, - ) - mock_celery = Mock(**{"retry.side_effect": Retry}) - - with pytest.raises(Retry): - _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - - -@pytest.mark.skip(reason="Letter tests") -def test_sanitise_precompiled_pdf_sets_notification_to_technical_failure_after_too_many_errors(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - rmock.post( - "http://localhost:9999/precompiled/sanitise", - 
content=b"new_pdf", - status_code=500, - ) - mock_celery = Mock(**{"retry.side_effect": MaxRetriesExceededError}) - - with pytest.raises(MaxRetriesExceededError): - _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py index 4cdc277db9..7de3d47b74 100644 --- a/tests/app/celery/test_nightly_tasks.py +++ b/tests/app/celery/test_nightly_tasks.py @@ -17,8 +17,8 @@ delete_sms_notifications_older_than_retention, letter_raise_alert_if_no_ack_file_for_zip, raise_alert_if_letter_notifications_still_sending, - remove_letter_csv_files, - remove_sms_email_csv_files, + remove_letter_jobs, + remove_sms_email_jobs, remove_transformed_dvla_files, s3, send_daily_performance_platform_stats, @@ -72,11 +72,11 @@ def mock_s3_get_list_diff(bucket_name, subfolder="", suffix="", last_modified=No @freeze_time("2016-10-18T10:00:00") -def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_db_session, mocker, sample_template): +def test_will_archive_jobs_older_than_seven_days(notify_db, notify_db_session, mocker, sample_template): """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_jobs_from_s3") seven_days_ago = datetime.utcnow() - timedelta(days=7) just_under_seven_days = seven_days_ago + timedelta(seconds=1) @@ -91,22 +91,20 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_ dont_delete_me_1 = create_job(sample_template, created_at=seven_days_ago) create_job(sample_template, created_at=just_under_seven_days) - remove_sms_email_csv_files() + remove_sms_email_jobs() - assert s3.remove_job_from_s3.call_args_list == [ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - ] + args = s3.remove_jobs_from_s3.call_args.args[0] + assert sorted(args, key=lambda x: x.id) == sorted([job1_to_delete, job2_to_delete], key=lambda x: x.id) assert job1_to_delete.archived is True assert dont_delete_me_1.archived is False @freeze_time("2016-10-18T10:00:00") -def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, notify_db_session, mocker): +def test_will_archive_jobs_older_than_retention_period(notify_db, notify_db_session, mocker): """ Jobs older than retention period are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_jobs_from_s3") service_1 = create_service(service_name="service 1") service_2 = create_service(service_name="service 2") create_service_data_retention(service=service_1, notification_type=SMS_TYPE, days_of_retention=3) @@ -129,22 +127,17 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, n job3_to_delete = create_job(email_template_service_2, created_at=thirty_one_days_ago) job4_to_delete = create_job(sms_template_service_2, created_at=eight_days_ago) - remove_sms_email_csv_files() + remove_sms_email_jobs() - s3.remove_job_from_s3.assert_has_calls( - [ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - call(job3_to_delete.service_id, job3_to_delete.id), - call(job4_to_delete.service_id, job4_to_delete.id), - ], - any_order=True, + 
+    args = s3.remove_jobs_from_s3.call_args.args[0]
+    assert sorted(args, key=lambda x: x.id) == sorted(
+        [job1_to_delete, job2_to_delete, job3_to_delete, job4_to_delete], key=lambda x: x.id
+    )


 @freeze_time("2017-01-01 10:00:00")
-def test_remove_csv_files_filters_by_type(mocker, sample_service):
-    mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3")
+def test_archive_jobs_by_type(mocker, sample_service):
+    mocker.patch("app.celery.nightly_tasks.s3.remove_jobs_from_s3")
     """
     Jobs older than seven days are deleted, but only two days' worth (two-day window)
     """
@@ -156,11 +149,9 @@ def test_remove_csv_files_filters_by_type(mocker, sample_service):
     job_to_delete = create_job(template=letter_template, created_at=eight_days_ago)
     create_job(template=sms_template, created_at=eight_days_ago)

-    remove_letter_csv_files()
+    remove_letter_jobs()

-    assert s3.remove_job_from_s3.call_args_list == [
-        call(job_to_delete.service_id, job_to_delete.id),
-    ]
+    assert s3.remove_jobs_from_s3.call_args.args[0] == [job_to_delete]


 def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker):
diff --git a/tests/app/celery/test_process_pinpoint_receipts_tasks.py b/tests/app/celery/test_process_pinpoint_receipts_tasks.py
new file mode 100644
index 0000000000..ea9bfc0654
--- /dev/null
+++ b/tests/app/celery/test_process_pinpoint_receipts_tasks.py
@@ -0,0 +1,242 @@
+from datetime import datetime
+
+import pytest
+from freezegun import freeze_time
+
+from app import statsd_client
+from app.aws.mocks import (
+    pinpoint_delivered_callback,
+    pinpoint_failed_callback,
+    pinpoint_shortcode_delivered_callback,
+    pinpoint_successful_callback,
+)
+from app.celery.process_pinpoint_receipts_tasks import process_pinpoint_results
+from app.dao.notifications_dao import get_notification_by_id
+from app.models import (
+    NOTIFICATION_DELIVERED,
+    NOTIFICATION_PERMANENT_FAILURE,
+    NOTIFICATION_SENT,
+    NOTIFICATION_TECHNICAL_FAILURE,
+    NOTIFICATION_TEMPORARY_FAILURE,
+)
+from app.notifications.callbacks import create_delivery_status_callback_data
+from celery.exceptions import MaxRetriesExceededError
+from tests.app.conftest import create_sample_notification
+from tests.app.db import (
+    create_notification,
+    create_service_callback_api,
+    save_notification,
+)
+
+
+@pytest.mark.parametrize(
+    "callback, expected_response",
+    [
+        (pinpoint_delivered_callback, "Message has been accepted by phone"),
+        (pinpoint_shortcode_delivered_callback, "Message has been accepted by phone carrier"),
+    ],
+)
+def test_process_pinpoint_results_delivered(sample_template, notify_db, notify_db_session, callback, expected_response, mocker):
+    mock_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.info")
+    mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task")
+
+    notification = create_sample_notification(
+        notify_db,
+        notify_db_session,
+        template=sample_template,
+        reference="ref",
+        status=NOTIFICATION_SENT,
+        sent_by="pinpoint",
+        sent_at=datetime.utcnow(),
+    )
+    assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT
+
+    process_pinpoint_results(callback(reference="ref"))
+
+    mock_callback_task.assert_called_once_with(get_notification_by_id(notification.id))
+    assert get_notification_by_id(notification.id).status == NOTIFICATION_DELIVERED
+    assert get_notification_by_id(notification.id).provider_response == expected_response
+
+    mock_logger.assert_called_once_with(f"Pinpoint callback return status of delivered for notification: {notification.id}")
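
[Reviewer note] The "delivered" test above and the "succeeded" test below are deliberately different cases: Pinpoint emits both a carrier-accepted receipt and a handset-confirmed receipt, and only the latter should move a notification to delivered. A minimal sketch of that branching, assuming Pinpoint-style "record_status" values as used by the mocks in app/aws/mocks.py; the helper name and the exact payload shape are illustrative, not the module's actual internals:

    from typing import Optional

    from app.models import NOTIFICATION_DELIVERED

    def status_for_success_receipt(record_status: str) -> Optional[str]:
        # Sketch only: fold a success-type Pinpoint receipt into a status.
        if record_status == "DELIVERED":
            return NOTIFICATION_DELIVERED  # handset confirmed delivery
        if record_status == "SUCCESSFUL":
            return None  # carrier accepted only: leave the notification as "sent"
        raise ValueError(f"unexpected record_status: {record_status}")
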
+
+
+def test_process_pinpoint_results_succeeded(sample_template, notify_db, notify_db_session, mocker):
+    mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task")
+
+    notification = create_sample_notification(
+        notify_db,
+        notify_db_session,
+        template=sample_template,
+        reference="ref",
+        status=NOTIFICATION_SENT,
+        sent_by="pinpoint",
+        sent_at=datetime.utcnow(),
+    )
+    assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT
+
+    process_pinpoint_results(pinpoint_successful_callback(reference="ref"))
+
+    mock_callback_task.assert_not_called()
+    assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT
+    assert get_notification_by_id(notification.id).provider_response is None
+
+
+@pytest.mark.parametrize(
+    "provider_response, expected_status, should_log_warning, should_save_provider_response",
+    [
+        (
+            "Blocked as spam by phone carrier",
+            NOTIFICATION_TECHNICAL_FAILURE,
+            False,
+            True,
+        ),
+        (
+            "Phone carrier is currently unreachable/unavailable",
+            NOTIFICATION_TEMPORARY_FAILURE,
+            False,
+            True,
+        ),
+        (
+            "Phone is currently unreachable/unavailable",
+            NOTIFICATION_PERMANENT_FAILURE,
+            False,
+            True,
+        ),
+        ("This is not a real response", NOTIFICATION_TECHNICAL_FAILURE, True, True),
+    ],
+)
+def test_process_pinpoint_results_failed(
+    sample_template,
+    notify_db,
+    notify_db_session,
+    mocker,
+    provider_response,
+    expected_status,
+    should_log_warning,
+    should_save_provider_response,
+):
+    mock_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.info")
+    mock_warning_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.warning")
+    mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task")
+
+    notification = create_sample_notification(
+        notify_db,
+        notify_db_session,
+        template=sample_template,
+        reference="ref",
+        status=NOTIFICATION_SENT,
+        sent_by="pinpoint",
+        sent_at=datetime.utcnow(),
+    )
+    assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT
+    process_pinpoint_results(pinpoint_failed_callback(provider_response=provider_response, reference="ref"))
+
+    mock_callback_task.assert_called_once_with(get_notification_by_id(notification.id))
+    assert get_notification_by_id(notification.id).status == expected_status
+
+    if should_save_provider_response:
+        assert get_notification_by_id(notification.id).provider_response == provider_response
+    else:
+        assert get_notification_by_id(notification.id).provider_response is None
+
+    mock_logger.assert_called_once_with(
+        (
+            f"Pinpoint delivery failed: notification id {notification.id} and reference ref has error found. "
+            f"Provider response: {provider_response}"
+        )
+    )
+
+    assert mock_warning_logger.call_count == int(should_log_warning)
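
[Reviewer note] The parametrize table above is effectively a specification of the failure mapping. A hedged sketch of it as a plain dict lookup; the helper name and dict are invented for illustration, not lifted from app/celery/process_pinpoint_receipts_tasks.py:

    from app.models import (
        NOTIFICATION_PERMANENT_FAILURE,
        NOTIFICATION_TECHNICAL_FAILURE,
        NOTIFICATION_TEMPORARY_FAILURE,
    )

    # Known Pinpoint failure responses and the status each should produce.
    _FAILURE_STATUSES = {
        "Blocked as spam by phone carrier": NOTIFICATION_TECHNICAL_FAILURE,
        "Phone carrier is currently unreachable/unavailable": NOTIFICATION_TEMPORARY_FAILURE,
        "Phone is currently unreachable/unavailable": NOTIFICATION_PERMANENT_FAILURE,
    }

    def status_for_failure(provider_response: str) -> str:
        # Unrecognised responses fall back to technical-failure; the handler
        # also logs a warning then, which is what should_log_warning asserts.
        return _FAILURE_STATUSES.get(provider_response, NOTIFICATION_TECHNICAL_FAILURE)
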
" + f"Provider response: {provider_response}" + ) + ) + + assert mock_warning_logger.call_count == int(should_log_warning) + + +def test_pinpoint_callback_should_retry_if_notification_is_missing(notify_db, mocker): + mock_retry = mocker.patch("app.celery.process_pinpoint_receipts_tasks.process_pinpoint_results.retry") + mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") + + process_pinpoint_results(pinpoint_delivered_callback(reference="ref")) + + mock_callback_task.assert_not_called() + assert mock_retry.call_count == 1 + + +def test_pinpoint_callback_should_give_up_after_max_tries(notify_db, mocker): + mocker.patch( + "app.celery.process_pinpoint_receipts_tasks.process_pinpoint_results.retry", + side_effect=MaxRetriesExceededError, + ) + mock_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.warning") + mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") + + process_pinpoint_results(pinpoint_delivered_callback(reference="ref")) is None + mock_callback_task.assert_not_called() + + mock_logger.assert_called_with("notification not found for Pinpoint reference: ref (update to delivered). Giving up.") + + +def test_process_pinpoint_results_retry_called(sample_template, mocker): + save_notification( + create_notification( + sample_template, + reference="ref1", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="pinpoint", + ) + ) + + mocker.patch( + "app.dao.notifications_dao._update_notification_status", + side_effect=Exception("EXPECTED"), + ) + mocked = mocker.patch("app.celery.process_pinpoint_receipts_tasks.process_pinpoint_results.retry") + process_pinpoint_results(response=pinpoint_delivered_callback(reference="ref1")) + assert mocked.call_count == 1 + + +def test_process_pinpoint_results_does_not_process_other_providers(sample_template, mocker): + mock_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.exception") + mock_dao = mocker.patch("app.dao.notifications_dao._update_notification_status") + save_notification( + create_notification( + sample_template, + reference="ref1", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="sns", + ) + ) + + process_pinpoint_results(response=pinpoint_delivered_callback(reference="ref1")) is None + assert mock_logger.called_once_with("") + assert not mock_dao.called + + +def test_process_pinpoint_results_calls_service_callback(sample_template, notify_db_session, notify_db, mocker): + with freeze_time("2021-01-01T12:00:00"): + mocker.patch("app.statsd_client.incr") + mocker.patch("app.statsd_client.timing_with_dates") + mock_send_status = mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async") + mock_callback = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") + + notification = create_sample_notification( + notify_db, + notify_db_session, + template=sample_template, + reference="ref", + status=NOTIFICATION_SENT, + sent_by="pinpoint", + sent_at=datetime.utcnow(), + ) + callback_api = create_service_callback_api(service=sample_template.service, url="https://example.com") + assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT + + process_pinpoint_results(pinpoint_delivered_callback(reference="ref")) + + assert mock_callback.called_once_with(get_notification_by_id(notification.id)) + assert get_notification_by_id(notification.id).status == NOTIFICATION_DELIVERED + assert 
get_notification_by_id(notification.id).provider_response == "Message has been accepted by phone" + statsd_client.timing_with_dates.assert_any_call("callback.pinpoint.elapsed-time", datetime.utcnow(), notification.sent_at) + statsd_client.incr.assert_any_call("callback.pinpoint.delivered") + updated_notification = get_notification_by_id(notification.id) + signed_data = create_delivery_status_callback_data(updated_notification, callback_api) + mock_send_status.assert_called_once_with([str(notification.id), signed_data], queue="service-callbacks") diff --git a/tests/app/celery/test_research_mode_tasks.py b/tests/app/celery/test_research_mode_tasks.py index b5e02ab6d3..b667132f4b 100644 --- a/tests/app/celery/test_research_mode_tasks.py +++ b/tests/app/celery/test_research_mode_tasks.py @@ -8,6 +8,8 @@ from freezegun import freeze_time from app.aws.mocks import ( + pinpoint_delivered_callback, + pinpoint_failed_callback, ses_notification_callback, sns_failed_callback, sns_success_callback, @@ -50,6 +52,30 @@ def test_make_sns_success_callback(notify_api, mocker, phone_number, sns_callbac assert message_celery == sns_callback(**sns_callback_args) +@pytest.mark.parametrize( + "phone_number, pinpoint_callback, pinpoint_callback_args", + [ + ("+15149301630", pinpoint_delivered_callback, {}), + ("+15149301631", pinpoint_delivered_callback, {}), + ("+15149301632", pinpoint_failed_callback, {"provider_response": "Phone is currently unreachable/unavailable"}), + ("+15149301633", pinpoint_failed_callback, {"provider_response": "Phone carrier is currently unreachable/unavailable"}), + ], +) +@freeze_time("2018-01-25 14:00:30") +def test_make_pinpoint_success_callback(notify_api, mocker, phone_number, pinpoint_callback, pinpoint_callback_args): + mock_task = mocker.patch("app.celery.research_mode_tasks.process_pinpoint_results") + some_ref = str(uuid.uuid4()) + now = datetime.now() + timestamp = now.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + + send_sms_response("pinpoint", phone_number, some_ref) + + mock_task.apply_async.assert_called_once_with(ANY, queue=QueueNames.RESEARCH_MODE) + message_celery = mock_task.apply_async.call_args[0][0][0] + pinpoint_callback_args.update({"reference": some_ref, "destination": phone_number, "timestamp": timestamp}) + assert message_celery == pinpoint_callback(**pinpoint_callback_args) + + def test_make_ses_callback(notify_api, mocker): mock_task = mocker.patch("app.celery.research_mode_tasks.process_ses_results") some_ref = str(uuid.uuid4()) diff --git a/tests/app/celery/test_service_callback_tasks.py b/tests/app/celery/test_service_callback_tasks.py index 265734b39e..eda0e212d8 100644 --- a/tests/app/celery/test_service_callback_tasks.py +++ b/tests/app/celery/test_service_callback_tasks.py @@ -90,8 +90,9 @@ def test_send_complaint_to_service_posts_https_request_to_service_with_signed_da @pytest.mark.parametrize("notification_type", ["email", "letter", "sms"]) -def test__send_data_to_service_callback_api_retries_if_request_returns_500_with_signed_data( - notify_db_session, mocker, notification_type +@pytest.mark.parametrize("status_code", [429, 500, 503]) +def test__send_data_to_service_callback_api_retries_if_request_returns_error_code_with_signed_data( + notify_db_session, mocker, notification_type, status_code ): callback_api, template = _set_up_test_data(notification_type, "delivery_status") datestr = datetime(2017, 6, 20) @@ -107,11 +108,11 @@ def test__send_data_to_service_callback_api_retries_if_request_returns_500_with_ signed_data = 
_set_up_data_for_status_update(callback_api, notification) mocked = mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.retry") with requests_mock.Mocker() as request_mock: - request_mock.post(callback_api.url, json={}, status_code=500) + request_mock.post(callback_api.url, json={}, status_code=status_code) send_delivery_status_to_service(notification.id, signed_status_update=signed_data) assert mocked.call_count == 1 - assert mocked.call_args[1]["queue"] == "retry-tasks" + assert mocked.call_args[1]["queue"] == "service-callbacks-retry" @pytest.mark.parametrize("notification_type", ["email", "letter", "sms"]) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 437d59e7ec..aeeb6c8c76 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -2,7 +2,7 @@ import uuid from datetime import datetime, timedelta from unittest import mock -from unittest.mock import Mock, call +from unittest.mock import MagicMock, Mock, call import pytest import requests_mock @@ -891,9 +891,10 @@ def test_process_rows_sends_save_task( mocker.patch("app.celery.tasks.create_uuid", return_value="noti_uuid") task_mock = mocker.patch("app.celery.tasks.{}".format(expected_function)) signer_mock = mocker.patch("app.celery.tasks.signer_notification.sign") - template = Mock(id="template_id", template_type=template_type, process_type=NORMAL) + template = MagicMock(id="template_id", template_type=template_type, process_type=NORMAL) job = Mock(id="job_id", template_version="temp_vers", notification_count=1, api_key_id=api_key_id, sender_id=sender_id) service = Mock(id="service_id", research_mode=research_mode) + template.__len__.return_value = 1 process_rows( [ @@ -950,10 +951,11 @@ def test_should_redirect_email_job_to_queue_depending_on_csv_threshold( ): mock_save_email = mocker.patch("app.celery.tasks.save_emails") - template = Mock(id=1, template_type=EMAIL_TYPE, process_type=template_process_type) + template = MagicMock(id=1, template_type=EMAIL_TYPE, process_type=template_process_type) api_key = Mock(id=1, key_type=KEY_TYPE_NORMAL) job = Mock(id=1, template_version="temp_vers", notification_count=1, api_key=api_key) service = Mock(id=1, research_mode=False) + template.__len__.return_value = 1 row = next( RecipientCSV( @@ -994,10 +996,11 @@ def test_should_redirect_sms_job_to_queue_depending_on_csv_threshold( ): mock_save_sms = mocker.patch("app.celery.tasks.save_smss") - template = Mock(id=1, template_type=SMS_TYPE, process_type=template_process_type) + template = MagicMock(id=1, template_type=SMS_TYPE, process_type=template_process_type) api_key = Mock(id=1, key_type=KEY_TYPE_NORMAL) job = Mock(id=1, template_version="temp_vers", notification_count=1, api_key=api_key) service = Mock(id=1, research_mode=False) + template.__len__.return_value = 1 row = next( RecipientCSV( @@ -1066,7 +1069,8 @@ def test_process_rows_works_without_key_type( mocker.patch("app.celery.tasks.create_uuid", return_value="noti_uuid") task_mock = mocker.patch("app.celery.tasks.{}".format(expected_function)) signer_mock = mocker.patch("app.celery.tasks.signer_notification.sign") - template = Mock(id="template_id", template_type=template_type, process_type=NORMAL) + template = MagicMock(id="template_id", template_type=template_type, process_type=NORMAL) + template.__len__.return_value = 1 api_key = {} job = Mock( id="job_id", @@ -1154,7 +1158,7 @@ def test_save_sms_should_use_redis_cache_to_retrieve_service_and_template_when_p notification["sender_id"] = sender_id 
sms_sender = ServiceSmsSender()
-    sms_sender.sms_sender = "+16502532222"
+    sms_sender.sms_sender = "6135550123"
     mocked_get_sender_id = mocker.patch("app.celery.tasks.dao_get_service_sms_senders_by_id", return_value=sms_sender)
     celery_task = "deliver_throttled_sms" if sender_id else "deliver_sms"
     mocked_deliver_sms = mocker.patch(f"app.celery.provider_tasks.{celery_task}.apply_async")
@@ -1191,6 +1195,8 @@ def test_save_sms_should_use_redis_cache_to_retrieve_service_and_template_when_p
     assert persisted_notification.personalisation == {"name": "Jo"}
     assert persisted_notification._personalisation == signer_personalisation.sign({"name": "Jo"})
     assert persisted_notification.notification_type == "sms"
+    assert persisted_notification.reply_to_text == (f"+1{sms_sender.sms_sender}" if sender_id else None)
+
     mocked_deliver_sms.assert_called_once_with(
         [str(persisted_notification.id)], queue="send-throttled-sms-tasks" if sender_id else QueueNames.SEND_SMS_MEDIUM
     )
diff --git a/tests/app/clients/test_aws_pinpoint.py b/tests/app/clients/test_aws_pinpoint.py
new file mode 100644
index 0000000000..ad7546d1ad
--- /dev/null
+++ b/tests/app/clients/test_aws_pinpoint.py
@@ -0,0 +1,73 @@
+import pytest
+
+from app import aws_pinpoint_client
+from tests.conftest import set_config_values
+
+
+@pytest.mark.serial
+def test_send_sms_sends_to_default_pool(notify_api, mocker, sample_template):
+    boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True)
+    mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True)
+    to = "6135555555"
+    content = "foo"
+    reference = "ref"
+
+    with set_config_values(
+        notify_api,
+        {
+            "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id",
+            "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
+            "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name",
+            "AWS_PINPOINT_SC_TEMPLATE_IDS": [],
+        },
+    ):
+        aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id)
+
+    boto_mock.send_text_message.assert_called_once_with(
+        DestinationPhoneNumber="+16135555555",
+        OriginationIdentity="default_pool_id",
+        MessageBody=content,
+        MessageType="TRANSACTIONAL",
+        ConfigurationSetName="config_set_name",
+    )
+
+
+@pytest.mark.serial
+def test_send_sms_sends_to_shortcode_pool(notify_api, mocker, sample_template):
+    boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True)
+    mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True)
+    to = "6135555555"
+    content = "foo"
+    reference = "ref"
+
+    with set_config_values(
+        notify_api,
+        {
+            "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id",
+            "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
+            "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name",
+            "AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sample_template.id)],
+        },
+    ):
+        aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id)
+
+    boto_mock.send_text_message.assert_called_once_with(
+        DestinationPhoneNumber="+16135555555",
+        OriginationIdentity="sc_pool_id",
+        MessageBody=content,
+        MessageType="TRANSACTIONAL",
+        ConfigurationSetName="config_set_name",
+    )
+
+
+def test_send_sms_raises_error_if_no_valid_number_is_found(notify_api, mocker):
+    mocker.patch.object(aws_pinpoint_client, "_client", create=True)
+    mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True)
+
+    to = ""
+    content = reference = "foo"
+
+    with pytest.raises(ValueError) as excinfo:
+        aws_pinpoint_client.send_sms(to, content, reference)
+
+    assert "No valid numbers found for SMS delivery" in str(excinfo.value)
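
[Reviewer note] The two pool tests above encode the routing rule for the new client: use the short-code pool when the template id is listed in AWS_PINPOINT_SC_TEMPLATE_IDS, and the default pool otherwise, with the ten-digit recipient normalised to E.164 ("6135555555" -> "+16135555555"). A sketch of that choice under those assumptions; the helper name and dict-style config access are illustrative, not the client's actual code:

    def choose_origination_pool(config: dict, template_id) -> str:
        # Short-code templates are pinned to the dedicated short-code pool.
        if str(template_id) in config["AWS_PINPOINT_SC_TEMPLATE_IDS"]:
            return config["AWS_PINPOINT_SC_POOL_ID"]
        return config["AWS_PINPOINT_DEFAULT_POOL_ID"]
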
diff --git a/tests/app/clients/test_freshdesk.py b/tests/app/clients/test_freshdesk.py
index f730764f29..3e8b56227d 100644
--- a/tests/app/clients/test_freshdesk.py
+++ b/tests/app/clients/test_freshdesk.py
@@ -127,12 +127,22 @@ def match_json(request):
             "description": "A new logo has been uploaded by name (test@email.com) for the following service:\n"
" "- Service id: 8624bd36-b70b-4d4b-a459-13e1f4770b92
" "- Service name: t6
" + "- Organisation id: 6b72e84f-8591-42e1-93b8-7d24a45e1d79
" + "- Organisation name: best org name ever
" "- Logo filename: branding_url
" + "- Logo name: branding_logo_name
" + "- Alt text english: en alt text
" + "- Alt text french: fr alt text
" "

" "Un nouveau logo a été téléchargé par name (test@email.com) pour le service suivant :
" "- Identifiant du service : 8624bd36-b70b-4d4b-a459-13e1f4770b92
" "- Nom du service : t6
" - "- Nom du fichier du logo : branding_url", + "- Identifiant de l'organisation: 6b72e84f-8591-42e1-93b8-7d24a45e1d79
" + "- Nom de l'organisation: best org name ever
" + "- Nom du fichier du logo : branding_url
" + "- Nom du logo : branding_logo_name
" + "- Texte alternatif anglais : en alt text
" + "- Texte alternatif français : fr alt text", "email": "test@email.com", "priority": 1, "status": 2, @@ -158,8 +168,13 @@ def match_json(request): "friendly_support_type": "Branding request", "support_type": "branding_request", "service_name": "t6", + "organisation_id": "6b72e84f-8591-42e1-93b8-7d24a45e1d79", + "department_org_name": "best org name ever", "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92", "branding_url": "branding_url", + "branding_logo_name": "branding_logo_name", + "alt_text_en": "en alt text", + "alt_text_fr": "fr alt text", } with notify_api.app_context(): response = freshdesk.Freshdesk(ContactRequest(**data)).send_ticket() diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py index 581da5add7..2601522739 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py +++ b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py @@ -1,7 +1,6 @@ from datetime import date, datetime, timedelta import pytest -from flask import current_app from freezegun import freeze_time from app.dao.notifications_dao import ( @@ -99,7 +98,7 @@ def _create_templates(sample_service): @pytest.mark.parametrize("month, delete_run_time", [(4, "2016-04-10 23:40"), (1, "2016-01-11 00:40")]) @pytest.mark.parametrize( "notification_type, expected_sms_count, expected_email_count, expected_letter_count", - [("sms", 7, 10, 10), ("email", 10, 7, 10), ("letter", 10, 10, 7)], + [("sms", 7, 10, 10), ("email", 10, 7, 10)], ) def test_should_delete_notifications_by_type_after_seven_days( sample_service, @@ -111,7 +110,6 @@ def test_should_delete_notifications_by_type_after_seven_days( expected_email_count, expected_letter_count, ): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") email_template, letter_template, sms_template = _create_templates(sample_service) # create one notification a day between 1st and 10th from 11:00 to 19:00 of each type for i in range(1, 11): @@ -157,7 +155,6 @@ def test_should_delete_notifications_by_type_after_seven_days( @freeze_time("2016-01-10 12:00:00.000000") def test_should_not_delete_notification_history(sample_service, mocker): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") with freeze_time("2016-01-01 12:00"): email_template, letter_template, sms_template = _create_templates(sample_service) save_notification(create_notification(template=email_template, status="permanent-failure")) @@ -169,22 +166,13 @@ def test_should_not_delete_notification_history(sample_service, mocker): assert NotificationHistory.query.count() == 1 -@pytest.mark.parametrize("notification_type", ["sms", "email", "letter"]) +@pytest.mark.parametrize("notification_type", ["sms", "email"]) def test_delete_notifications_for_days_of_retention(sample_service, notification_type, mocker): - mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") create_test_data(notification_type, sample_service) assert Notification.query.count() == 9 delete_notifications_older_than_retention_by_type(notification_type) assert Notification.query.count() == 7 assert Notification.query.filter_by(notification_type=notification_type).count() == 1 - if notification_type == "letter": - mock_get_s3.assert_called_with( - bucket_name=current_app.config["LETTERS_PDF_BUCKET_NAME"], - subfolder="{}/NOTIFY.LETTER_REF.D.2.C.C".format(str(datetime.utcnow().date())), - ) - assert mock_get_s3.call_count == 2 - else: - 
mock_get_s3.assert_not_called()


 def test_delete_notifications_inserts_notification_history(sample_service):
@@ -197,7 +185,6 @@


 def test_delete_notifications_updates_notification_history(sample_email_template, mocker):
-    mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     notification = save_notification(
         create_notification(template=sample_email_template, created_at=datetime.utcnow() - timedelta(days=8))
     )
@@ -232,7 +219,6 @@ def test_delete_notifications_keep_data_for_days_of_retention_is_longer(sample_s

 def test_delete_notifications_with_test_keys(sample_template, mocker):
-    mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     save_notification(
         create_notification(
             template=sample_template,
@@ -279,18 +265,8 @@ def test_delete_notifications_delete_notification_type_for_default_time_if_no_da
     assert Notification.query.filter_by(notification_type="email").count() == 1


-def test_delete_notifications_does_try_to_delete_from_s3_when_letter_has_not_been_sent(sample_service, mocker):
-    mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
-    letter_template = create_template(service=sample_service, template_type="letter")
-
-    save_notification(create_notification(template=letter_template, status="sending", reference="LETTER_REF"))
-    delete_notifications_older_than_retention_by_type("email", qry_limit=1)
-    mock_get_s3.assert_not_called()
-
-
 @freeze_time("2016-01-10 12:00:00.000000")
 def test_should_not_delete_notification_if_history_does_not_exist(sample_service, mocker):
-    mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects")
     mocker.patch("app.dao.notifications_dao.insert_update_notification_history")
     with freeze_time("2016-01-01 12:00"):
         email_template, letter_template, sms_template = _create_templates(sample_service)
diff --git a/tests/app/dao/test_api_key_dao.py b/tests/app/dao/test_api_key_dao.py
index 5e23002171..eabb4cbdbf 100644
--- a/tests/app/dao/test_api_key_dao.py
+++ b/tests/app/dao/test_api_key_dao.py
@@ -116,12 +116,27 @@ def test_get_unsigned_secret_returns_key(sample_api_key):
     assert unsigned_api_key == sample_api_key.secret


-def test_get_api_key_by_secret(sample_api_key):
-    unsigned_secret = get_unsigned_secret(sample_api_key.id)
-    assert get_api_key_by_secret(unsigned_secret).id == sample_api_key.id
-
-    with pytest.raises(NoResultFound):
-        get_api_key_by_secret("nope")
+class TestGetAPIKeyBySecret:
+    def test_get_api_key_by_secret(self, sample_api_key):
+        secret = get_unsigned_secret(sample_api_key.id)
+        # Create token expected from the frontend
+        unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{secret}"
+        assert get_api_key_by_secret(unsigned_secret).id == sample_api_key.id
+
+        with pytest.raises(ValueError):
+            get_api_key_by_secret("nope")
+
+        # Test getting secret without the keyname prefix
+        with pytest.raises(ValueError):
+            get_api_key_by_secret(str(sample_api_key.id))
+
+        # Test the service_name isn't part of the secret
+        with pytest.raises(ValueError):
+            get_api_key_by_secret(f"gcntfy-keyname-hello-{secret}")
+
+        # Test the secret is incorrect
+        with pytest.raises(NoResultFound):
+            get_api_key_by_secret(f"gcntfy-keyname-hello-{sample_api_key.service_id}-1234")


 def test_should_not_allow_duplicate_key_names_per_service(sample_api_key, fake_uuid):
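
[Reviewer note] The new TestGetAPIKeyBySecret tests pin down an error contract for the frontend token format gcntfy-keyname-{service_id}-{secret}: malformed tokens raise ValueError, while a well-formed token that matches no stored key surfaces as NoResultFound. A caller-side sketch of handling both, with an invented helper name; only the exception split is taken from the tests above:

    from sqlalchemy.orm.exc import NoResultFound

    from app.dao.api_key_dao import get_api_key_by_secret

    def authenticate(token: str):
        # Distinguish "token does not look like gcntfy-keyname-{service_id}-{secret}"
        # from "well-formed token, but no matching key", per the tests above.
        try:
            return get_api_key_by_secret(token)
        except (ValueError, NoResultFound):
            return None
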
diff --git a/tests/app/dao/test_email_branding_dao.py b/tests/app/dao/test_email_branding_dao.py
index a69c912577..a3bc948a34 100644
--- a/tests/app/dao/test_email_branding_dao.py
+++ b/tests/app/dao/test_email_branding_dao.py
@@ -5,11 +5,12 @@
     dao_update_email_branding,
 )
 from app.models import EmailBranding
-from tests.app.db import create_email_branding
+from tests.app.db import create_email_branding, create_organisation


 def test_get_email_branding_options_gets_all_email_branding(notify_db, notify_db_session):
-    email_branding_1 = create_email_branding(name="test_email_branding_1")
+    org_1 = create_organisation()
+    email_branding_1 = create_email_branding(name="test_email_branding_1", organisation_id=org_1.id)
     email_branding_2 = create_email_branding(name="test_email_branding_2")

     email_branding = dao_get_email_branding_options()
@@ -18,6 +19,13 @@
     assert email_branding_1 == email_branding[0]
     assert email_branding_2 == email_branding[1]

+    org_1_id = email_branding_1.organisation_id
+
+    email_branding = dao_get_email_branding_options(filter_by_organisation_id=org_1_id)
+    assert len(email_branding) == 1
+    assert email_branding_1 == email_branding[0]
+    assert email_branding[0].organisation_id == org_1_id
+

 def test_get_email_branding_by_id_gets_correct_email_branding(notify_db, notify_db_session):
     email_branding = create_email_branding()
diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py
index 29e1c001f0..58e3007739 100644
--- a/tests/app/dao/test_jobs_dao.py
+++ b/tests/app/dao/test_jobs_dao.py
@@ -17,6 +17,7 @@
     dao_set_scheduled_jobs_to_pending,
     dao_update_job,
 )
+from app.dao.service_data_retention_dao import insert_service_data_retention
 from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE, Job
 from tests.app.db import (
     create_job,
@@ -348,6 +349,42 @@ def test_should_get_jobs_seven_days_old_by_scheduled_for_date(sample_service):
     assert job_to_remain.id not in [job.id for job in jobs]


+@freeze_time("2016-10-31 10:00:00")
+def test_should_get_limited_number_of_jobs(sample_template):
+    flexible_retention_service1 = create_service(service_name="Another service 1")
+    insert_service_data_retention(flexible_retention_service1.id, sample_template.template_type, 3)
+    flexible_template1 = create_template(flexible_retention_service1, template_type=sample_template.template_type)
+
+    flexible_retention_service2 = create_service(service_name="Another service 2")
+    insert_service_data_retention(flexible_retention_service2.id, sample_template.template_type, 2)
+    flexible_template2 = create_template(flexible_retention_service2, template_type=sample_template.template_type)
+
+    eight_days_ago = datetime.utcnow() - timedelta(days=8)
+    four_days_ago = datetime.utcnow() - timedelta(days=4)
+
+    for _ in range(4):
+        create_job(flexible_template1, created_at=four_days_ago)
+        create_job(flexible_template2, created_at=four_days_ago)
+        create_job(sample_template, created_at=eight_days_ago)
+
+    jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type], limit=3)
+
+    assert len(jobs) == 3
+
+
+@freeze_time("2016-10-31 10:00:00")
+def test_should_not_get_limited_number_of_jobs_by_default(sample_template):
+    eight_days_ago = datetime.utcnow() - timedelta(days=8)
+
+    create_job(sample_template, created_at=eight_days_ago)
+    create_job(sample_template, created_at=eight_days_ago)
+    create_job(sample_template, created_at=eight_days_ago)
+
+    jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type])
+
+    assert len(jobs) == 3
+
+
 def assert_job_stat(job, result, sent, delivered, failed):
     assert result.job_id ==
job.id assert result.original_file_name == job.original_file_name diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py index 5b8b8e5348..6acce65192 100644 --- a/tests/app/dao/test_provider_details_dao.py +++ b/tests/app/dao/test_provider_details_dao.py @@ -241,9 +241,14 @@ def test_get_sms_provider_with_equal_priority_returns_provider( def test_get_current_sms_provider_returns_active_only(restore_provider_details): + # Note that we currently have two active sms providers: sns and pinpoint. current_provider = get_current_provider("sms") current_provider.active = False dao_update_provider_details(current_provider) + current_provider = get_current_provider("sms") + current_provider.active = False + dao_update_provider_details(current_provider) + new_current_provider = get_current_provider("sms") assert new_current_provider is None @@ -308,5 +313,5 @@ def test_dao_get_provider_stats(notify_db_session): assert result[5].identifier == "pinpoint" assert result[5].notification_type == "sms" assert result[5].supports_international is False - assert result[5].active is False + assert result[5].active is True assert result[5].current_month_billable_sms == 0 diff --git a/tests/app/db.py b/tests/app/db.py index 1dacec37bc..c9ff33427c 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -502,13 +502,15 @@ def create_service_callback_api( return service_callback_api -def create_email_branding(colour="blue", logo="test_x2.png", name="test_org_1", text="DisplayName"): +def create_email_branding(colour="blue", logo="test_x2.png", name="test_org_1", text="DisplayName", organisation_id=None): data = { "colour": colour, "logo": logo, "name": name, "text": text, } + if organisation_id: + data["organisation_id"] = organisation_id email_branding = EmailBranding(**data) dao_create_email_branding(email_branding) diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index 018df49123..0768d98cc9 100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -51,6 +51,78 @@ from tests.conftest import set_config_values +class TestProviderToUse: + def test_should_use_pinpoint_for_sms_by_default_if_configured(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234") + assert provider.name == "pinpoint" + + def test_should_use_sns_for_sms_by_default_if_partially_configured(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [], + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234", template_id=uuid.uuid4()) + assert provider.name == "sns" + + def test_should_use_pinpoint_for_sms_for_sc_template_if_sc_pool_configured(self, restore_provider_details, notify_api): + sc_template = uuid.uuid4() + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sc_template)], + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234", template_id=sc_template) + assert provider.name == "pinpoint" + + def test_should_use_sns_for_sms_if_dedicated_number(self, 
restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234", False, "+12345678901") + assert provider.name == "sns" + + def test_should_use_sns_for_sms_if_sending_to_the_US(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+17065551234") + assert provider.name == "sns" + + @pytest.mark.parametrize("sc_pool_id, default_pool_id", [("", "default_pool_id"), ("sc_pool_id", "")]) + def test_should_use_sns_if_pinpoint_not_configured(self, restore_provider_details, notify_api, sc_pool_id, default_pool_id): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": sc_pool_id, + "AWS_PINPOINT_DEFAULT_POOL_ID": default_pool_id, + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234") + assert provider.name == "sns" + + @pytest.mark.skip(reason="Currently using only 1 SMS provider") def test_should_return_highest_priority_active_provider(restore_provider_details): providers = provider_details_dao.get_provider_details_by_notification_type("sms") @@ -84,21 +156,6 @@ def test_should_return_highest_priority_active_provider(restore_provider_details assert send_to_providers.provider_to_use("sms", "1234").name == first.identifier -def test_provider_to_use(restore_provider_details): - providers = provider_details_dao.get_provider_details_by_notification_type("sms") - first = providers[0] - - assert first.identifier == "sns" - - # provider is still SNS if SMS and sender is set - provider = send_to_providers.provider_to_use("sms", "1234", False, "+12345678901") - assert first.identifier == provider.name - - # provider is highest priority sms provider if sender is not set - provider = send_to_providers.provider_to_use("sms", "1234", False) - assert first.identifier == provider.name - - def test_should_send_personalised_template_to_correct_sms_provider_and_persist(sample_sms_template_with_html, mocker): db_notification = save_notification( create_notification( @@ -120,6 +177,7 @@ def test_should_send_personalised_template_to_correct_sms_provider_and_persist(s content="Sample service: Hello Jo\nHere is some HTML & entities", reference=str(db_notification.id), sender=current_app.config["FROM_NUMBER"], + template_id=sample_sms_template_with_html.id, ) notification = Notification.query.filter_by(id=db_notification.id).one() @@ -338,6 +396,7 @@ def test_send_sms_should_use_template_version_from_notification_not_latest(sampl content="Sample service: This is a template:\nwith a newline", reference=str(db_notification.id), sender=current_app.config["FROM_NUMBER"], + template_id=sample_template.id, ) persisted_notification = notifications_dao.get_notification_by_id(db_notification.id) @@ -416,7 +475,7 @@ def test_should_send_sms_with_downgraded_content(notify_db_session, mocker): send_to_providers.send_sms_to_provider(db_notification) - aws_sns_client.send_sms.assert_called_once_with(to=ANY, content=gsm_message, reference=ANY, sender=ANY) + aws_sns_client.send_sms.assert_called_once_with(to=ANY, content=gsm_message, reference=ANY, sender=ANY, template_id=ANY) def test_send_sms_should_use_service_sms_sender(sample_service, sample_template, mocker): @@ -429,7 +488,9 @@ def 
test_send_sms_should_use_service_sms_sender(sample_service, sample_template, db_notification, ) - app.aws_sns_client.send_sms.assert_called_once_with(to=ANY, content=ANY, reference=ANY, sender=sms_sender.sms_sender) + app.aws_sns_client.send_sms.assert_called_once_with( + to=ANY, content=ANY, reference=ANY, sender=sms_sender.sms_sender, template_id=ANY + ) @pytest.mark.parametrize("research_mode,key_type", [(True, KEY_TYPE_NORMAL), (False, KEY_TYPE_TEST)]) @@ -596,19 +657,20 @@ def test_get_html_email_renderer_with_branding_details_and_render_fip_banner_eng sample_service.email_branding = None notify_db.session.add_all([sample_service]) notify_db.session.commit() - options = send_to_providers.get_html_email_options(sample_service) assert options == { "fip_banner_english": True, "fip_banner_french": False, "logo_with_background_colour": False, + "alt_text_en": None, + "alt_text_fr": None, } def test_get_html_email_renderer_prepends_logo_path(notify_api): Service = namedtuple("Service", ["email_branding"]) - EmailBranding = namedtuple("EmailBranding", ["brand_type", "colour", "name", "logo", "text"]) + EmailBranding = namedtuple("EmailBranding", ["brand_type", "colour", "name", "logo", "text", "alt_text_en", "alt_text_fr"]) email_branding = EmailBranding( brand_type=BRANDING_ORG_NEW, @@ -616,6 +678,8 @@ def test_get_html_email_renderer_prepends_logo_path(notify_api): logo="justice-league.png", name="Justice League", text="League of Justice", + alt_text_en="alt_text_en", + alt_text_fr="alt_text_fr", ) service = Service( email_branding=email_branding, @@ -628,7 +692,7 @@ def test_get_html_email_renderer_prepends_logo_path(notify_api): def test_get_html_email_renderer_handles_email_branding_without_logo(notify_api): Service = namedtuple("Service", ["email_branding"]) - EmailBranding = namedtuple("EmailBranding", ["brand_type", "colour", "name", "logo", "text"]) + EmailBranding = namedtuple("EmailBranding", ["brand_type", "colour", "name", "logo", "text", "alt_text_en", "alt_text_fr"]) email_branding = EmailBranding( brand_type=BRANDING_ORG_BANNER_NEW, @@ -636,6 +700,8 @@ def test_get_html_email_renderer_handles_email_branding_without_logo(notify_api) logo=None, name="Justice League", text="League of Justice", + alt_text_en="alt_text_en", + alt_text_fr="alt_text_fr", ) service = Service( email_branding=email_branding, @@ -649,6 +715,8 @@ def test_get_html_email_renderer_handles_email_branding_without_logo(notify_api) assert renderer["brand_text"] == "League of Justice" assert renderer["brand_colour"] == "#000000" assert renderer["brand_name"] == "Justice League" + assert renderer["alt_text_en"] == "alt_text_en" + assert renderer["alt_text_fr"] == "alt_text_fr" def test_should_not_update_notification_if_research_mode_on_exception(sample_service, sample_notification, mocker): @@ -793,6 +861,7 @@ def test_should_handle_sms_sender_and_prefix_message( sender=expected_sender, to=ANY, reference=ANY, + template_id=ANY, ) diff --git a/tests/app/email_branding/test_rest.py b/tests/app/email_branding/test_rest.py index c09218d62d..f2a360a17e 100644 --- a/tests/app/email_branding/test_rest.py +++ b/tests/app/email_branding/test_rest.py @@ -4,8 +4,8 @@ from tests.app.db import create_email_branding -def test_get_email_branding_options(admin_request, notify_db, notify_db_session): - email_branding1 = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Org1") +def test_get_email_branding_options(admin_request, notify_db, notify_db_session, sample_organisation): + email_branding1 = 
EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Org1", organisation_id=sample_organisation.id) email_branding2 = EmailBranding(colour="#000000", logo="/path/other.png", name="Org2") notify_db.session.add_all([email_branding1, email_branding2]) notify_db.session.commit() @@ -17,10 +17,31 @@ def test_get_email_branding_options(admin_request, notify_db, notify_db_session) str(email_branding1.id), str(email_branding2.id), } + assert email_branding[0]["organisation_id"] == str(sample_organisation.id) + assert email_branding[1]["organisation_id"] == "" + + +def test_get_email_branding_options_filter_org(admin_request, notify_db, notify_db_session, sample_organisation): + email_branding1 = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Org1", organisation_id=sample_organisation.id) + email_branding2 = EmailBranding(colour="#000000", logo="/path/other.png", name="Org2") + notify_db.session.add_all([email_branding1, email_branding2]) + notify_db.session.commit() + email_branding = admin_request.get("email_branding.get_email_branding_options", organisation_id=sample_organisation.id)[ + "email_branding" + ] + + assert len(email_branding) == 1 + assert email_branding[0]["organisation_id"] == str(sample_organisation.id) + + email_branding2 = admin_request.get("email_branding.get_email_branding_options")["email_branding"] + + assert len(email_branding2) == 2 def test_get_email_branding_by_id(admin_request, notify_db, notify_db_session): - email_branding = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Some Org", text="My Org") + email_branding = EmailBranding( + colour="#FFFFFF", logo="/path/image.png", name="Some Org", text="My Org", alt_text_en="hello world" + ) notify_db.session.add(email_branding) notify_db.session.commit() @@ -37,6 +58,9 @@ def test_get_email_branding_by_id(admin_request, notify_db, notify_db_session): "id", "text", "brand_type", + "organisation_id", + "alt_text_en", + "alt_text_fr", } assert response["email_branding"]["colour"] == "#FFFFFF" assert response["email_branding"]["logo"] == "/path/image.png" @@ -44,6 +68,8 @@ def test_get_email_branding_by_id(admin_request, notify_db, notify_db_session): assert response["email_branding"]["text"] == "My Org" assert response["email_branding"]["id"] == str(email_branding.id) assert response["email_branding"]["brand_type"] == str(email_branding.brand_type) + assert response["email_branding"]["alt_text_en"] == "hello world" + assert response["email_branding"]["alt_text_fr"] is None def test_post_create_email_branding(admin_request, notify_db_session): @@ -52,6 +78,8 @@ def test_post_create_email_branding(admin_request, notify_db_session): "colour": "#0000ff", "logo": "/images/test_x2.png", "brand_type": BRANDING_ORG_NEW, + "alt_text_en": "hello world", + "alt_text_fr": "bonjour le monde", } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) assert data["name"] == response["data"]["name"] @@ -59,6 +87,8 @@ def test_post_create_email_branding(admin_request, notify_db_session): assert data["logo"] == response["data"]["logo"] assert data["name"] == response["data"]["text"] assert data["brand_type"] == response["data"]["brand_type"] + assert data["alt_text_en"] == response["data"]["alt_text_en"] + assert data["alt_text_fr"] == response["data"]["alt_text_fr"] def test_post_create_email_branding_without_brand_type_defaults(admin_request, notify_db_session): @@ -66,16 +96,15 @@ def 
test_post_create_email_branding_without_brand_type_defaults(admin_request, n "name": "test email_branding", "colour": "#0000ff", "logo": "/images/test_x2.png", + "alt_text_en": "hello world", + "alt_text_fr": "bonjour le monde", } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) assert BRANDING_ORG_NEW == response["data"]["brand_type"] def test_post_create_email_branding_without_logo_is_ok(admin_request, notify_db_session): - data = { - "name": "test email_branding", - "colour": "#0000ff", - } + data = {"name": "test email_branding", "colour": "#0000ff", "alt_text_en": "hello", "alt_text_fr": "bonjour"} response = admin_request.post( "email_branding.create_email_branding", _data=data, @@ -85,13 +114,15 @@ def test_post_create_email_branding_without_logo_is_ok(admin_request, notify_db_ def test_post_create_email_branding_colour_is_valid(admin_request, notify_db_session): - data = {"logo": "images/text_x2.png", "name": "test branding"} + data = {"logo": "images/text_x2.png", "name": "test branding", "alt_text_en": "hello", "alt_text_fr": "bonjour"} response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) assert response["data"]["logo"] == data["logo"] assert response["data"]["name"] == "test branding" assert response["data"]["colour"] is None assert response["data"]["text"] == "test branding" + assert response["data"]["alt_text_en"] == "hello" + assert response["data"]["alt_text_fr"] == "bonjour" def test_post_create_email_branding_with_text(admin_request, notify_db_session): @@ -99,6 +130,8 @@ def test_post_create_email_branding_with_text(admin_request, notify_db_session): "text": "text for brand", "logo": "images/text_x2.png", "name": "test branding", + "alt_text_en": "hello", + "alt_text_fr": "bonjour", } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) @@ -106,6 +139,8 @@ def test_post_create_email_branding_with_text(admin_request, notify_db_session): assert response["data"]["name"] == "test branding" assert response["data"]["colour"] is None assert response["data"]["text"] == "text for brand" + assert response["data"]["alt_text_en"] == "hello" + assert response["data"]["alt_text_fr"] == "bonjour" def test_post_create_email_branding_with_text_and_name(admin_request, notify_db_session): @@ -113,6 +148,8 @@ def test_post_create_email_branding_with_text_and_name(admin_request, notify_db_ "name": "name for brand", "text": "text for brand", "logo": "images/text_x2.png", + "alt_text_en": "hello", + "alt_text_fr": "bonjour", } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) @@ -120,20 +157,35 @@ def test_post_create_email_branding_with_text_and_name(admin_request, notify_db_ assert response["data"]["name"] == "name for brand" assert response["data"]["colour"] is None assert response["data"]["text"] == "text for brand" + assert response["data"]["alt_text_en"] == "hello" + assert response["data"]["alt_text_fr"] == "bonjour" def test_post_create_email_branding_with_text_as_none_and_name(admin_request, notify_db_session): - data = {"name": "name for brand", "text": None, "logo": "images/text_x2.png"} + data = { + "name": "name for brand", + "text": None, + "logo": "images/text_x2.png", + "alt_text_en": "hello", + "alt_text_fr": "bonjour", + } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) assert response["data"]["logo"] == 
data["logo"] assert response["data"]["name"] == "name for brand" assert response["data"]["colour"] is None assert response["data"]["text"] is None + assert response["data"]["alt_text_en"] == "hello" + assert response["data"]["alt_text_fr"] == "bonjour" def test_post_create_email_branding_returns_400_when_name_is_missing(admin_request, notify_db_session): - data = {"text": "some text", "logo": "images/text_x2.png"} + data = { + "text": "some text", + "logo": "images/text_x2.png", + "alt_text_en": "hello", + "alt_text_fr": "bonjour", + } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=400) assert response["errors"][0]["message"] == "name is a required property" @@ -150,7 +202,7 @@ def test_post_create_email_branding_returns_400_when_name_is_missing(admin_reque ], ) def test_post_update_email_branding_updates_field(admin_request, notify_db_session, data_update): - data = {"name": "test email_branding", "logo": "images/text_x2.png"} + data = {"name": "test email_branding", "logo": "images/text_x2.png", "alt_text_en": "hello", "alt_text_fr": "bonjour"} response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) email_branding_id = response["data"]["id"] @@ -179,7 +231,7 @@ def test_post_update_email_branding_updates_field(admin_request, notify_db_sessi ], ) def test_post_update_email_branding_updates_field_with_text(admin_request, notify_db_session, data_update): - data = {"name": "test email_branding", "logo": "images/text_x2.png"} + data = {"name": "test email_branding", "logo": "images/text_x2.png", "alt_text_en": "hello", "alt_text_fr": "bonjour"} response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) email_branding_id = response["data"]["id"] diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index 76f89ebdf9..338d3a5d84 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -8,12 +8,14 @@ import app.celery.tasks from app.dao.templates_dao import dao_update_template -from app.models import JOB_STATUS_PENDING, JOB_STATUS_TYPES +from app.models import JOB_STATUS_PENDING, JOB_STATUS_TYPES, ServiceSmsSender from tests import create_authorization_header from tests.app.db import ( create_ft_notification_status, create_job, create_notification, + create_service_with_inbound_number, + create_template, save_notification, ) from tests.conftest import set_config @@ -263,6 +265,39 @@ def test_create_unscheduled_job_with_sender_id_in_metadata(client, sample_templa app.celery.tasks.process_job.apply_async.assert_called_once_with(([str(fake_uuid)]), queue="job-tasks") +def test_create_job_sets_sender_id_from_database(client, mocker, fake_uuid, sample_user): + service = create_service_with_inbound_number(inbound_number="12345") + template = create_template(service=service) + sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first() + + mocker.patch("app.celery.tasks.process_job.apply_async") + mocker.patch( + "app.job.rest.get_job_metadata_from_s3", + return_value={ + "template_id": str(template.id), + "original_file_name": "thisisatest.csv", + "notification_count": "1", + "valid": "True", + }, + ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) + data = { + "id": fake_uuid, + "created_by": str(template.created_by.id), + } + path = "/service/{}/job".format(service.id) + auth_header = create_authorization_header() + headers = [("Content-Type", 
"application/json"), auth_header] + + response = client.post(path, data=json.dumps(data), headers=headers) + resp_json = json.loads(response.get_data(as_text=True)) + + assert resp_json["data"]["sender_id"] == str(sms_sender.id) + + @freeze_time("2016-01-01 12:00:00.000000") def test_create_scheduled_job(client, sample_template, mocker, fake_uuid): scheduled_date = (datetime.utcnow() + timedelta(hours=95, minutes=59)).isoformat() diff --git a/tests/app/letters/__init__.py b/tests/app/letters/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/app/letters/test_letter_utils.py b/tests/app/letters/test_letter_utils.py deleted file mode 100644 index ad4c05e56f..0000000000 --- a/tests/app/letters/test_letter_utils.py +++ /dev/null @@ -1,402 +0,0 @@ -from datetime import datetime - -import boto3 -import pytest -from flask import current_app -from freezegun import freeze_time -from moto import mock_s3 - -from app.letters.utils import ( - ScanErrorType, - copy_redaction_failed_pdf, - get_bucket_name_and_prefix_for_notification, - get_folder_name, - get_letter_pdf, - get_letter_pdf_filename, - letter_print_day, - move_failed_pdf, - upload_letter_pdf, -) -from app.models import ( - KEY_TYPE_NORMAL, - KEY_TYPE_TEST, - NOTIFICATION_VALIDATION_FAILED, - PRECOMPILED_TEMPLATE_NAME, -) -from tests.app.db import create_notification, save_notification - -FROZEN_DATE_TIME = "2018-03-14 17:00:00" - - -@pytest.mark.skip(reason="Letter tests") -@pytest.fixture(name="sample_precompiled_letter_notification") -def _sample_precompiled_letter_notification(sample_letter_notification): - sample_letter_notification.template.hidden = True - sample_letter_notification.template.name = PRECOMPILED_TEMPLATE_NAME - sample_letter_notification.reference = "foo" - with freeze_time(FROZEN_DATE_TIME): - sample_letter_notification.created_at = datetime.utcnow() - sample_letter_notification.updated_at = datetime.utcnow() - return sample_letter_notification - - -@pytest.mark.skip(reason="Letter tests") -@pytest.fixture(name="sample_precompiled_letter_notification_using_test_key") -def _sample_precompiled_letter_notification_using_test_key( - sample_precompiled_letter_notification, -): - sample_precompiled_letter_notification.key_type = KEY_TYPE_TEST - return sample_precompiled_letter_notification - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "created_at,folder", - [ - (datetime(2017, 1, 1, 17, 29), "2017-01-01"), - (datetime(2017, 1, 1, 17, 31), "2017-01-02"), - ], -) -@pytest.mark.skip(reason="Letter feature") -def test_get_bucket_name_and_prefix_for_notification_valid_notification(sample_notification, created_at, folder): - sample_notification.created_at = created_at - sample_notification.updated_at = created_at - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config["LETTERS_PDF_BUCKET_NAME"] - assert bucket_prefix == "{folder}/NOTIFY.{reference}".format(folder=folder, reference=sample_notification.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -def test_get_bucket_name_and_prefix_for_notification_get_from_sent_at_date( - sample_notification, -): - sample_notification.created_at = datetime(2019, 8, 1, 17, 35) - sample_notification.sent_at = datetime(2019, 8, 2, 17, 45) - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config["LETTERS_PDF_BUCKET_NAME"] - assert ( - bucket_prefix - == 
"{folder}/NOTIFY.{reference}".format(folder="2019-08-02", reference=sample_notification.reference).upper() - ) - - -@pytest.mark.skip(reason="Letter tests") -def test_get_bucket_name_and_prefix_for_notification_from_created_at_date( - sample_notification, -): - sample_notification.created_at = datetime(2019, 8, 1, 12, 00) - sample_notification.updated_at = datetime(2019, 8, 2, 12, 00) - sample_notification.sent_at = datetime(2019, 8, 3, 12, 00) - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config["LETTERS_PDF_BUCKET_NAME"] - assert ( - bucket_prefix - == "{folder}/NOTIFY.{reference}".format(folder="2019-08-03", reference=sample_notification.reference).upper() - ) - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_notification_precompiled_letter_using_test_key( - sample_precompiled_letter_notification_using_test_key, -): - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification_using_test_key) - - assert bucket == current_app.config["TEST_LETTERS_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_precompiled_letter_notification_using_test_key.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_notification_templated_letter_using_test_key( - sample_letter_notification, -): - sample_letter_notification.key_type = KEY_TYPE_TEST - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_letter_notification) - - assert bucket == current_app.config["TEST_LETTERS_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_letter_notification.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_failed_validation( - sample_precompiled_letter_notification, -): - sample_precompiled_letter_notification.status = NOTIFICATION_VALIDATION_FAILED - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification) - - assert bucket == current_app.config["INVALID_PDF_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_precompiled_letter_notification.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_test_noti_with_failed_validation( - sample_precompiled_letter_notification_using_test_key, -): - sample_precompiled_letter_notification_using_test_key.status = NOTIFICATION_VALIDATION_FAILED - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification_using_test_key) - - assert bucket == current_app.config["INVALID_PDF_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_precompiled_letter_notification_using_test_key.reference).upper() - - -@pytest.mark.skip(reason="Letter tests") -def test_get_bucket_name_and_prefix_for_notification_invalid_notification(): - with pytest.raises(AttributeError): - get_bucket_name_and_prefix_for_notification(None) - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "crown_flag,expected_crown_text", - [ - (True, "C"), - (False, "N"), - ], -) -@freeze_time("2017-12-04 17:29:00") -def test_get_letter_pdf_filename_returns_correct_filename(notify_api, mocker, crown_flag, expected_crown_text): - filename = get_letter_pdf_filename(reference="foo", 
crown=crown_flag) - - assert filename == "2017-12-04/NOTIFY.FOO.D.2.C.{}.20171204172900.PDF".format(expected_crown_text) - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "postage,expected_postage", - [ - ("second", 2), - ("first", 1), - ], -) -@freeze_time("2017-12-04 17:29:00") -def test_get_letter_pdf_filename_returns_correct_postage_for_filename(notify_api, postage, expected_postage): - filename = get_letter_pdf_filename(reference="foo", crown=True, postage=postage) - - assert filename == "2017-12-04/NOTIFY.FOO.D.{}.C.C.20171204172900.PDF".format(expected_postage) - - -@freeze_time("2017-12-04 17:29:00") -def test_get_letter_pdf_filename_returns_correct_filename_for_test_letters(notify_api, mocker): - filename = get_letter_pdf_filename(reference="foo", crown="C", is_scan_letter=True) - - assert filename == "NOTIFY.FOO.D.2.C.C.20171204172900.PDF" - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-12-04 17:31:00") -@pytest.mark.skip(reason="Letter feature") -def test_get_letter_pdf_filename_returns_tomorrows_filename(notify_api, mocker): - filename = get_letter_pdf_filename(reference="foo", crown=True) - - assert filename == "2017-12-05/NOTIFY.FOO.D.2.C.C.20171204173100.PDF" - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -@pytest.mark.parametrize( - "bucket_config_name,filename_format", - [ - ("TEST_LETTERS_BUCKET_NAME", "NOTIFY.FOO.D.2.C.C.%Y%m%d%H%M%S.PDF"), - ("LETTERS_PDF_BUCKET_NAME", "%Y-%m-%d/NOTIFY.FOO.D.2.C.C.%Y%m%d%H%M%S.PDF"), - ], -) -@freeze_time(FROZEN_DATE_TIME) -def test_get_letter_pdf_gets_pdf_from_correct_bucket( - sample_precompiled_letter_notification_using_test_key, - bucket_config_name, - filename_format, -): - if bucket_config_name == "LETTERS_PDF_BUCKET_NAME": - sample_precompiled_letter_notification_using_test_key.key_type = KEY_TYPE_NORMAL - - bucket_name = current_app.config[bucket_config_name] - filename = datetime.utcnow().strftime(filename_format) - conn = boto3.resource("s3") - conn.create_bucket(Bucket=bucket_name) - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - ret = get_letter_pdf(sample_precompiled_letter_notification_using_test_key) - - assert ret == b"pdf_content" - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "is_precompiled_letter,bucket_config_name", - [(False, "LETTERS_PDF_BUCKET_NAME"), (True, "LETTERS_SCAN_BUCKET_NAME")], -) -def test_upload_letter_pdf_to_correct_bucket(sample_letter_notification, mocker, is_precompiled_letter, bucket_config_name): - if is_precompiled_letter: - sample_letter_notification.template.hidden = True - sample_letter_notification.template.name = PRECOMPILED_TEMPLATE_NAME - - mock_s3 = mocker.patch("app.letters.utils.s3upload") - - filename = get_letter_pdf_filename( - reference=sample_letter_notification.reference, - crown=sample_letter_notification.service.crown, - is_scan_letter=is_precompiled_letter, - ) - - upload_letter_pdf(sample_letter_notification, b"\x00\x01", precompiled=is_precompiled_letter) - - mock_s3.assert_called_once_with( - bucket_name=current_app.config[bucket_config_name], - file_location=filename, - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize("postage,expected_postage", [("second", 2), ("first", 1)]) -def test_upload_letter_pdf_uses_postage_from_notification(sample_letter_template, mocker, postage, expected_postage): - letter_notification = 
save_notification(create_notification(template=sample_letter_template, postage=postage)) - mock_s3 = mocker.patch("app.letters.utils.s3upload") - - filename = get_letter_pdf_filename( - reference=letter_notification.reference, - crown=letter_notification.service.crown, - is_scan_letter=False, - postage=letter_notification.postage, - ) - - upload_letter_pdf(letter_notification, b"\x00\x01", precompiled=False) - - mock_s3.assert_called_once_with( - bucket_name=current_app.config["LETTERS_PDF_BUCKET_NAME"], - file_location=filename, - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_move_failed_pdf_error(notify_api): - filename = "test.pdf" - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - conn = boto3.resource("s3") - bucket = conn.create_bucket(Bucket=bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - move_failed_pdf(filename, ScanErrorType.ERROR) - - assert "ERROR/" + filename in [o.key for o in bucket.objects.all()] - assert filename not in [o.key for o in bucket.objects.all()] - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_move_failed_pdf_scan_failed(notify_api): - filename = "test.pdf" - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - conn = boto3.resource("s3") - bucket = conn.create_bucket(Bucket=bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - move_failed_pdf(filename, ScanErrorType.FAILURE) - - assert "FAILURE/" + filename in [o.key for o in bucket.objects.all()] - assert filename not in [o.key for o in bucket.objects.all()] - - -@pytest.mark.skip(reason="Letter tests") -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_copy_redaction_failed_pdf(notify_api): - filename = "test.pdf" - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - conn = boto3.resource("s3") - bucket = conn.create_bucket(Bucket=bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - copy_redaction_failed_pdf(filename) - - assert "REDACTION_FAILURE/" + filename in [o.key for o in bucket.objects.all()] - assert filename in [o.key for o in bucket.objects.all()] - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "freeze_date, expected_folder_name", - [ - ("2018-04-01 17:50:00", "2018-04-02/"), - ("2018-07-02 16:29:00", "2018-07-02/"), - ("2018-07-02 16:30:00", "2018-07-02/"), - ("2018-07-02 16:31:00", "2018-07-03/"), - ("2018-01-02 16:31:00", "2018-01-02/"), - ("2018-01-02 17:31:00", "2018-01-03/"), - ("2018-07-02 22:30:00", "2018-07-03/"), - ("2018-07-02 23:30:00", "2018-07-03/"), - ("2018-07-03 00:30:00", "2018-07-03/"), - ("2018-01-02 22:30:00", "2018-01-03/"), - ("2018-01-02 23:30:00", "2018-01-03/"), - ("2018-01-03 00:30:00", "2018-01-03/"), - ], -) -@pytest.mark.skip(reason="Letter feature") -def test_get_folder_name_in_british_summer_time(notify_api, freeze_date, expected_folder_name): - with freeze_time(freeze_date): - now = datetime.utcnow() - folder_name = get_folder_name(_now=now, is_test_or_scan_letter=False) - assert folder_name == expected_folder_name - - -@pytest.mark.skip(reason="Letter tests") -def test_get_folder_name_returns_empty_string_for_test_letter(): - assert "" == 
get_folder_name(datetime.utcnow(), is_test_or_scan_letter=True) - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-07-07 20:00:00") -@pytest.mark.skip(reason="Letter feature") -def test_letter_print_day_returns_today_if_letter_was_printed_after_1730_yesterday(): - created_at = datetime(2017, 7, 6, 17, 30) - assert letter_print_day(created_at) == "today" - - -@pytest.mark.skip(reason="Letter tests") -@freeze_time("2017-07-07 16:30:00") -def test_letter_print_day_returns_today_if_letter_was_printed_today(): - created_at = datetime(2017, 7, 7, 12, 0) - assert letter_print_day(created_at) == "today" - - -@pytest.mark.skip(reason="Letter tests") -@pytest.mark.parametrize( - "created_at, formatted_date", - [ - (datetime(2017, 7, 5, 16, 30), "on 6 July"), - (datetime(2017, 7, 6, 16, 29), "on 6 July"), - (datetime(2016, 8, 8, 10, 00), "on 8 August"), - (datetime(2016, 12, 12, 17, 29), "on 12 December"), - (datetime(2016, 12, 12, 17, 30), "on 13 December"), - ], -) -@freeze_time("2017-07-07 16:30:00") -@pytest.mark.skip(reason="Letter feature") -def test_letter_print_day_returns_formatted_date_if_letter_printed_before_1730_yesterday(created_at, formatted_date): - assert letter_print_day(created_at) == formatted_date diff --git a/tests/app/letters/test_returned_letters.py b/tests/app/letters/test_returned_letters.py deleted file mode 100644 index 7cdf223641..0000000000 --- a/tests/app/letters/test_returned_letters.py +++ /dev/null @@ -1,27 +0,0 @@ -import pytest - - -@pytest.mark.skip(reason="Deprecated: LETTER CODE") -@pytest.mark.parametrize( - "status, references", - [ - (200, ["1234567890ABCDEF", "1234567890ABCDEG"]), - (400, ["1234567890ABCDEFG", "1234567890ABCDEG"]), - (400, ["1234567890ABCDE", "1234567890ABCDEG"]), - (400, ["1234567890ABCDE\u26d4", "1234567890ABCDEG"]), - (400, ["NOTIFY0001234567890ABCDEF", "1234567890ABCDEG"]), - ], -) -def test_process_returned_letters(status, references, admin_request, mocker): - mock_celery = mocker.patch("app.letters.rest.process_returned_letters_list.apply_async") - - response = admin_request.post( - "letter-job.create_process_returned_letters_job", - _data={"references": references}, - _expected_status=status, - ) - - if status != 200: - assert "{} does not match".format(references[0]) in response["errors"][0]["message"] - else: - mock_celery.assert_called_once_with([references], queue="database-tasks") diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index d7eae2d564..4c2069a6e2 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -38,7 +38,6 @@ Notification, Service, ServiceEmailReplyTo, - ServiceLetterContact, ServicePermission, ServiceSmsSender, User, @@ -57,7 +56,6 @@ create_ft_notification_status, create_inbound_number, create_letter_branding, - create_letter_contact, create_notification, create_organisation, create_reply_to_email, @@ -458,7 +456,7 @@ def test_create_service_with_domain_sets_organisation(admin_request, sample_user assert json_resp["data"]["organisation"] is None -def test_create_service_inherits_branding_from_organisation(admin_request, sample_user, mocker): +def test_create_service_doesnt_inherit_branding_from_organisation(admin_request, sample_user, mocker): org = create_organisation() email_branding = create_email_branding() org.email_branding = email_branding @@ -482,8 +480,7 @@ def test_create_service_inherits_branding_from_organisation(admin_request, sampl _expected_status=201, ) - assert json_resp["data"]["email_branding"] == str(email_branding.id) - 
assert json_resp["data"]["letter_branding"] == str(letter_branding.id) + assert json_resp["data"]["email_branding"] is None def test_should_not_create_service_with_missing_user_id_field(notify_api, fake_uuid): @@ -663,47 +660,6 @@ def test_cant_update_service_org_type_to_random_value(client, sample_service): assert resp.status_code == 500 -def test_update_service_letter_branding(client, notify_db, sample_service): - letter_branding = create_letter_branding(name="test brand", filename="test-brand") - data = {"letter_branding": str(letter_branding.id)} - - auth_header = create_authorization_header() - - resp = client.post( - "/service/{}".format(sample_service.id), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - result = resp.json - assert resp.status_code == 200 - assert result["data"]["letter_branding"] == str(letter_branding.id) - - -def test_update_service_remove_letter_branding(client, notify_db, sample_service): - letter_branding = create_letter_branding(name="test brand", filename="test-brand") - sample_service - data = {"letter_branding": str(letter_branding.id)} - - auth_header = create_authorization_header() - - client.post( - "/service/{}".format(sample_service.id), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - - data = {"letter_branding": None} - resp = client.post( - "/service/{}".format(sample_service.id), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - - result = resp.json - assert resp.status_code == 200 - assert result["data"]["letter_branding"] is None - - def test_update_service_remove_email_branding(admin_request, notify_db, sample_service): brand = EmailBranding(colour="#000000", logo="justice-league.png", name="Justice League") sample_service.email_branding = brand @@ -2430,25 +2386,6 @@ def test_send_one_off_notification(sample_service, admin_request, mocker): assert response["id"] == str(noti.id) -def test_create_pdf_letter(mocker, sample_service_full_permissions, client, fake_uuid, notify_user): - mocker.patch("app.service.send_notification.utils_s3download") - mocker.patch("app.service.send_notification.get_page_count", return_value=1) - mocker.patch("app.service.send_notification.move_uploaded_pdf_to_letters_bucket") - - user = sample_service_full_permissions.users[0] - data = json.dumps({"filename": "valid.pdf", "created_by": str(user.id), "file_id": fake_uuid}) - - response = client.post( - url_for("service.create_pdf_letter", service_id=sample_service_full_permissions.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - json_resp = json.loads(response.get_data(as_text=True)) - - assert response.status_code == 201 - assert json_resp == {"id": fake_uuid} - - def test_get_notification_for_service_includes_template_redacted(admin_request, sample_notification): resp = admin_request.get( "service.get_notification_for_service", @@ -2964,218 +2901,6 @@ def test_get_email_reply_to_address(client, notify_db, notify_db_session): assert json.loads(response.get_data(as_text=True)) == reply_to.serialize() -def test_get_letter_contacts_when_there_are_no_letter_contacts(client, sample_service): - response = client.get( - "/service/{}/letter-contact".format(sample_service.id), - headers=[create_authorization_header()], - ) - - assert json.loads(response.get_data(as_text=True)) == [] - assert response.status_code == 200 - - -def test_get_letter_contacts_with_one_letter_contact(client, 
notify_db, notify_db_session): - service = create_service() - create_letter_contact(service, "Aberdeen, AB23 1XH") - - response = client.get( - "/service/{}/letter-contact".format(service.id), - headers=[create_authorization_header()], - ) - json_response = json.loads(response.get_data(as_text=True)) - - assert len(json_response) == 1 - assert json_response[0]["contact_block"] == "Aberdeen, AB23 1XH" - assert json_response[0]["is_default"] - assert json_response[0]["created_at"] - assert not json_response[0]["updated_at"] - assert response.status_code == 200 - - -def test_get_letter_contacts_with_multiple_letter_contacts(client, notify_db, notify_db_session): - service = create_service() - letter_contact_a = create_letter_contact(service, "Aberdeen, AB23 1XH") - letter_contact_b = create_letter_contact(service, "London, E1 8QS", False) - - response = client.get( - "/service/{}/letter-contact".format(service.id), - headers=[create_authorization_header()], - ) - json_response = json.loads(response.get_data(as_text=True)) - - assert len(json_response) == 2 - assert response.status_code == 200 - - assert json_response[0]["id"] == str(letter_contact_a.id) - assert json_response[0]["service_id"] == str(letter_contact_a.service_id) - assert json_response[0]["contact_block"] == "Aberdeen, AB23 1XH" - assert json_response[0]["is_default"] - assert json_response[0]["created_at"] - assert not json_response[0]["updated_at"] - - assert json_response[1]["id"] == str(letter_contact_b.id) - assert json_response[1]["service_id"] == str(letter_contact_b.service_id) - assert json_response[1]["contact_block"] == "London, E1 8QS" - assert not json_response[1]["is_default"] - assert json_response[1]["created_at"] - assert not json_response[1]["updated_at"] - - -def test_get_letter_contact_by_id(client, notify_db, notify_db_session): - service = create_service() - letter_contact = create_letter_contact(service, "London, E1 8QS") - - response = client.get( - "/service/{}/letter-contact/{}".format(service.id, letter_contact.id), - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 200 - assert json.loads(response.get_data(as_text=True)) == letter_contact.serialize() - - -def test_get_letter_contact_return_404_when_invalid_contact_id(client, notify_db, notify_db_session): - service = create_service() - - response = client.get( - "/service/{}/letter-contact/{}".format(service.id, "93d59f88-4aa1-453c-9900-f61e2fc8a2de"), - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 404 - - -def test_add_service_contact_block(client, sample_service): - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - response = client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 201 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 1 - assert json_resp["data"] == results[0].serialize() - - -def test_add_service_letter_contact_can_add_multiple_addresses(client, sample_service): - first = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=first, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - second = 
json.dumps({"contact_block": "Aberdeen, AB23 1XH", "is_default": True}) - response = client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=second, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - assert response.status_code == 201 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 2 - default = [x for x in results if x.is_default] - assert json_resp["data"] == default[0].serialize() - first_letter_contact_not_default = [x for x in results if not x.is_default] - assert first_letter_contact_not_default[0].contact_block == "London, E1 8QS" - - -def test_add_service_letter_contact_block_fine_if_no_default(client, sample_service): - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": False}) - response = client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - assert response.status_code == 201 - - -def test_add_service_letter_contact_block_404s_when_invalid_service_id(client, notify_db, notify_db_session): - response = client.post( - "/service/{}/letter-contact".format(uuid.uuid4()), - data={}, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 404 - result = json.loads(response.get_data(as_text=True)) - assert result["result"] == "error" - assert result["message"] == "No result found" - - -def test_update_service_letter_contact(client, sample_service): - original_letter_contact = create_letter_contact(service=sample_service, contact_block="Aberdeen, AB23 1XH") - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - response = client.post( - "/service/{}/letter-contact/{}".format(sample_service.id, original_letter_contact.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 200 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 1 - assert json_resp["data"] == results[0].serialize() - - -def test_update_service_letter_contact_returns_200_when_no_default(client, sample_service): - original_reply_to = create_letter_contact(service=sample_service, contact_block="Aberdeen, AB23 1XH") - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": False}) - response = client.post( - "/service/{}/letter-contact/{}".format(sample_service.id, original_reply_to.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - assert response.status_code == 200 - - -def test_update_service_letter_contact_returns_404_when_invalid_service_id(client, notify_db, notify_db_session): - response = client.post( - "/service/{}/letter-contact/{}".format(uuid.uuid4(), uuid.uuid4()), - data={}, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 404 - result = json.loads(response.get_data(as_text=True)) - assert result["result"] == "error" - assert result["message"] == "No result found" - - -def test_delete_service_letter_contact_can_archive_letter_contact(admin_request, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Edinburgh, ED1 1AA") - letter_contact = create_letter_contact(service=service, contact_block="Swansea, SN1 3CC", 
is_default=False) - - admin_request.post( - "service.delete_service_letter_contact", - service_id=service.id, - letter_contact_id=letter_contact.id, - ) - - assert letter_contact.archived is True - - -def test_delete_service_letter_contact_returns_200_if_archiving_template_default(admin_request, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Edinburgh, ED1 1AA") - letter_contact = create_letter_contact(service=service, contact_block="Swansea, SN1 3CC", is_default=False) - create_template(service=service, template_type="letter", reply_to=letter_contact.id) - - response = admin_request.post( - "service.delete_service_letter_contact", - service_id=service.id, - letter_contact_id=letter_contact.id, - _expected_status=200, - ) - assert response["data"]["archived"] is True - - def test_add_service_sms_sender_can_add_multiple_senders(client, notify_db_session): service = create_service() data = { @@ -3481,91 +3206,6 @@ def test_cancel_notification_for_service_raises_invalid_request_when_notificatio assert response["result"] == "error" -@pytest.mark.parametrize( - "notification_status", - [ - "cancelled", - "sending", - "sent", - "delivered", - "pending", - "failed", - "technical-failure", - "temporary-failure", - "permanent-failure", - "validation-failed", - "virus-scan-failed", - "returned-letter", - ], -) -@freeze_time("2018-07-07 12:00:00") -def test_cancel_notification_for_service_raises_invalid_request_when_letter_is_in_wrong_state_to_be_cancelled( - admin_request, - sample_letter_notification, - notification_status, -): - sample_letter_notification.status = notification_status - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - _expected_status=400, - ) - assert response["message"] == "It’s too late to cancel this letter. Printing started today at 5.30pm" - assert response["result"] == "error" - - -@pytest.mark.parametrize("notification_status", ["created", "pending-virus-check"]) -@freeze_time("2018-07-07 16:00:00") -def test_cancel_notification_for_service_updates_letter_if_letter_is_in_cancellable_state( - admin_request, - sample_letter_notification, - notification_status, -): - sample_letter_notification.status = notification_status - sample_letter_notification.created_at = datetime.now() - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - ) - assert response["status"] == "cancelled" - - -@freeze_time("2017-12-12 17:30:00") -def test_cancel_notification_for_service_raises_error_if_its_too_late_to_cancel( - admin_request, - sample_letter_notification, -): - sample_letter_notification.created_at = datetime(2017, 12, 11, 17, 0) - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - _expected_status=400, - ) - assert response["message"] == "It’s too late to cancel this letter. 
Printing started on 11 December at 5.30pm" - assert response["result"] == "error" - - -@freeze_time("2018-7-7 16:00:00") -def test_cancel_notification_for_service_updates_letter_if_still_time_to_cancel( - admin_request, - sample_letter_notification, -): - sample_letter_notification.created_at = datetime(2018, 7, 7, 10, 0) - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - ) - assert response["status"] == "cancelled" - - def test_get_monthly_notification_data_by_service(mocker, admin_request): dao_mock = mocker.patch( "app.service.rest.fact_notification_status_dao.fetch_monthly_notification_statuses_per_service", diff --git a/tests/app/service/test_send_pdf_letter_notification.py b/tests/app/service/test_send_pdf_letter_notification.py deleted file mode 100644 index b236c83cab..0000000000 --- a/tests/app/service/test_send_pdf_letter_notification.py +++ /dev/null @@ -1,111 +0,0 @@ -import uuid - -import pytest -from freezegun import freeze_time -from notifications_utils.s3 import S3ObjectNotFound - -from app.dao.notifications_dao import get_notification_by_id -from app.models import EMAIL_TYPE, LETTER_TYPE, UPLOAD_LETTERS -from app.service.send_notification import send_pdf_letter_notification -from app.v2.errors import BadRequestError, TooManyRequestsError -from tests.app.db import create_service - - -@pytest.mark.parametrize( - "permissions", - [ - [EMAIL_TYPE], - [LETTER_TYPE], - [UPLOAD_LETTERS], - ], -) -def test_send_pdf_letter_notification_raises_error_if_service_does_not_have_permission( - notify_db_session, - fake_uuid, - permissions, -): - service = create_service(service_permissions=permissions) - post_data = {"filename": "valid.pdf", "created_by": fake_uuid, "file_id": fake_uuid} - - with pytest.raises(BadRequestError): - send_pdf_letter_notification(service.id, post_data) - - -def test_send_pdf_letter_notification_raises_error_if_service_is_over_daily_message_limit( - mocker, - sample_service_full_permissions, - fake_uuid, -): - mocker.patch( - "app.service.send_notification.check_service_over_daily_message_limit", - side_effect=TooManyRequestsError(10), - ) - post_data = {"filename": "valid.pdf", "created_by": fake_uuid, "file_id": fake_uuid} - - with pytest.raises(TooManyRequestsError): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -def test_send_pdf_letter_notification_validates_created_by(sample_service_full_permissions, fake_uuid, sample_user): - post_data = { - "filename": "valid.pdf", - "created_by": sample_user.id, - "file_id": fake_uuid, - } - - with pytest.raises(BadRequestError): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -def test_send_pdf_letter_notification_raises_error_when_pdf_is_not_in_transient_letter_bucket( - mocker, - sample_service_full_permissions, - fake_uuid, - notify_user, -): - user = sample_service_full_permissions.users[0] - post_data = {"filename": "valid.pdf", "created_by": user.id, "file_id": fake_uuid} - mocker.patch( - "app.service.send_notification.utils_s3download", - side_effect=S3ObjectNotFound({}, ""), - ) - - with pytest.raises(S3ObjectNotFound): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -@freeze_time("2019-08-02 11:00:00") -def test_send_pdf_letter_notification_creates_notification_and_moves_letter( - mocker, - sample_service_full_permissions, - notify_user, -): - user = 
sample_service_full_permissions.users[0] - filename = "valid.pdf" - file_id = uuid.uuid4() - post_data = {"filename": filename, "created_by": user.id, "file_id": file_id} - - mocker.patch("app.service.send_notification.utils_s3download") - mocker.patch("app.service.send_notification.get_page_count", return_value=1) - s3_mock = mocker.patch("app.service.send_notification.move_uploaded_pdf_to_letters_bucket") - - result = send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - notification = get_notification_by_id(file_id) - - assert notification.id == file_id - assert notification.api_key_id is None - assert notification.client_reference == filename - assert notification.created_by_id == user.id - assert notification.postage == "second" - assert notification.notification_type == LETTER_TYPE - assert notification.billable_units == 1 - assert notification.to == filename - assert notification.service_id == sample_service_full_permissions.id - - assert result == {"id": str(notification.id)} - - s3_mock.assert_called_once_with( - "service-{}/{}.pdf".format(sample_service_full_permissions.id, file_id), - "2019-08-02/NOTIFY.{}.D.2.C.C.20190802110000.PDF".format(notification.reference), - ) diff --git a/tests/app/test_config.py b/tests/app/test_config.py index 5b25f4b093..e27ba129d1 100644 --- a/tests/app/test_config.py +++ b/tests/app/test_config.py @@ -27,7 +27,7 @@ def reload_config(): def test_queue_names_all_queues_correct(): # Need to ensure that all_queues() only returns queue names used in API queues = QueueNames.all_queues() - assert len(queues) == 23 + assert len(queues) == 22 assert set( [ QueueNames.PRIORITY, @@ -37,12 +37,10 @@ def test_queue_names_all_queues_correct(): QueueNames.PRIORITY_DATABASE, QueueNames.NORMAL_DATABASE, QueueNames.BULK_DATABASE, - QueueNames.SEND_SMS, QueueNames.SEND_SMS_HIGH, QueueNames.SEND_SMS_MEDIUM, QueueNames.SEND_SMS_LOW, QueueNames.SEND_THROTTLED_SMS, - QueueNames.SEND_EMAIL, QueueNames.SEND_EMAIL_HIGH, QueueNames.SEND_EMAIL_MEDIUM, QueueNames.SEND_EMAIL_LOW, @@ -50,6 +48,7 @@ def test_queue_names_all_queues_correct(): QueueNames.REPORTING, QueueNames.JOBS, QueueNames.RETRY, + QueueNames.CALLBACKS_RETRY, QueueNames.NOTIFY, # QueueNames.CREATE_LETTERS_PDF, QueueNames.CALLBACKS, diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index 37c2806144..bdffcf4cdb 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -954,13 +954,18 @@ def test_send_contact_request_go_live_with_org_notes(organisation_notes, departm assert mock_contact_request.department_org_name == department_org_name -def test_send_branding_request(client, sample_service, mocker): +def test_send_branding_request(client, sample_service, sample_organisation, mocker): sample_user = sample_service.users[0] + sample_service.organisation = sample_organisation post_data = { "service_name": sample_service.name, "email_address": sample_user.email_address, "serviceID": str(sample_service.id), + "organisation_id": str(sample_service.organisation.id), + "organisation_name": sample_service.organisation.name, "filename": "branding_url", + "alt_text_en": "hello world", + "alt_text_fr": "bonjour", } mocked_freshdesk = mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201) mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client") diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py index 84b868168d..cc33c4d527 100644 --- 
a/tests/app/v2/notifications/test_post_notifications.py +++ b/tests/app/v2/notifications/test_post_notifications.py @@ -26,6 +26,7 @@ ApiKey, Notification, ScheduledNotification, + ServiceSmsSender, ) from app.schema_validation import validate from app.utils import get_document_url @@ -1515,6 +1516,8 @@ def __send_sms(): key_type=key_type, ) save_model_api_key(api_key) + api_key_secret = get_unsigned_secret(api_key.id) + unsigned_secret = f"gcntfy-keyname-{api_key.service_id}-{api_key_secret}" with set_config_values(notify_api, {"REDIS_ENABLED": True}): response = client.post( @@ -1522,7 +1525,7 @@ def __send_sms(): data=json.dumps(data), headers=[ ("Content-Type", "application/json"), - ("Authorization", f"ApiKey-v1 {get_unsigned_secret(api_key.id)}"), + ("Authorization", f"ApiKey-v1 {unsigned_secret}"), ], ) return response @@ -1563,6 +1566,8 @@ def __send_sms(): key_type=key_type, ) save_model_api_key(api_key) + api_key_secret = get_unsigned_secret(api_key.id) + unsigned_secret = f"gcntfy-keyname-{api_key.service_id}-{api_key_secret}" with set_config_values(notify_api, {"REDIS_ENABLED": True}): response = client.post( @@ -1570,7 +1575,7 @@ def __send_sms(): data=json.dumps(data), headers=[ ("Content-Type", "application/json"), - ("Authorization", f"ApiKey-v1 {get_unsigned_secret(api_key.id)}"), + ("Authorization", f"ApiKey-v1 {unsigned_secret}"), ], ) return response @@ -1607,6 +1612,8 @@ def __send_sms(): key_type=key_type, ) save_model_api_key(api_key) + api_key_secret = get_unsigned_secret(api_key.id) + unsigned_secret = f"gcntfy-keyname-{api_key.service_id}-{api_key_secret}" with set_config_values(notify_api, {"REDIS_ENABLED": True}): response = client.post( @@ -1614,7 +1621,7 @@ def __send_sms(): data=json.dumps(data), headers=[ ("Content-Type", "application/json"), - ("Authorization", f"ApiKey-v1 {get_unsigned_secret(api_key.id)}"), + ("Authorization", f"ApiKey-v1 {unsigned_secret}"), ], ) return response @@ -2484,12 +2491,39 @@ def test_post_bulk_creates_job_and_dispatches_celery_task( } } + def test_post_bulk_sms_sets_sender_id_from_database( + self, + client, + mocker, + notify_user, + notify_api, + ): + service = create_service_with_inbound_number(inbound_number="12345") + template = create_template(service=service) + sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first() + data = {"name": "job_name", "template_id": template.id, "rows": [["phone number"], ["6135550111"]]} + job_id = str(uuid.uuid4()) + mocker.patch("app.v2.notifications.post_notifications.upload_job_to_s3", return_value=job_id) + mocker.patch("app.v2.notifications.post_notifications.process_job.apply_async") + + client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=service.id), + ], + ) + + job = dao_get_job_by_id(job_id) + assert job.sender_id == sms_sender.id + def test_post_bulk_with_too_large_sms_fails(self, client, notify_db, notify_db_session, mocker): mocker.patch("app.sms_normal_publish.publish") mocker.patch("app.v2.notifications.post_notifications.create_bulk_job", return_value=str(uuid.uuid4())) service = create_service(sms_daily_limit=10, message_limit=100) - template = create_sample_template(notify_db, notify_db_session, service=service, template_type="sms", content="a" * 612) + template = create_sample_template(notify_db, notify_db_session, service=service, template_type="sms", content="a" * 613) data = { "name": "job_name", "template_id": template.id, @@ -2540,7 +2574,7 
@@ def test_post_bulk_with_too_large_sms_fail_and_shows_correct_row( ) assert response.status_code == 400 assert "has a character count greater than" in str(response.data) - assert "row #{}".format(failure_row) in str(response.data) + assert "Row {}".format(failure_row) in str(response.data) class TestBatchPriorityLanes: diff --git a/tests_cypress/package-lock.json b/tests_cypress/package-lock.json index 9ad62716b6..722e36d686 100644 --- a/tests_cypress/package-lock.json +++ b/tests_cypress/package-lock.json @@ -781,9 +781,9 @@ "integrity": "sha512-e8xL6YvnwRwN/1ey0aTQRbsE50rmUtT7WXPnr7ZjAUhTm/kRMKBzxmCZRqiGhy5Q4lOLOHlOkGFg2wCObvENcQ==" }, "cypress-recurse": { - "version": "1.35.2", - "resolved": "https://registry.npmjs.org/cypress-recurse/-/cypress-recurse-1.35.2.tgz", - "integrity": "sha512-G6HfxP90xa7phw8oeOX4uabxcI9gE1ktkKHShcA3nCByrkMLs56+GIJVn0A+ws1tI0PGRKBz6+V9DHS5WnZX4A==", + "version": "1.35.3", + "resolved": "https://registry.npmjs.org/cypress-recurse/-/cypress-recurse-1.35.3.tgz", + "integrity": "sha512-NbFOpEuZT4tFqAB0jQqel7WtVNDe8pvSHE2TfXvYk4pspf3wq98OC2RhhLn3bMnoCnPtY4IHO7e37c+CZ9HnMA==", "requires": { "humanize-duration": "^3.27.3" } @@ -1317,9 +1317,9 @@ "dev": true }, "humanize-duration": { - "version": "3.29.0", - "resolved": "https://registry.npmjs.org/humanize-duration/-/humanize-duration-3.29.0.tgz", - "integrity": "sha512-G5wZGwYTLaQAmYqhfK91aw3xt6wNbJW1RnWDh4qP1PvF4T/jnkjx2RVhG5kzB2PGsYGTn+oSDBQp+dMdILLxcg==" + "version": "3.32.0", + "resolved": "https://registry.npmjs.org/humanize-duration/-/humanize-duration-3.32.0.tgz", + "integrity": "sha512-6WsXYTHJr7hXKqoqf5zoWza/lANRAqGlbnZnm0cjDykbXuez1JVXOQGmq0EPB45pXYAJyueRA3S3hfhmMbrMEQ==" }, "iconv-lite": { "version": "0.6.3", @@ -1761,6 +1761,13 @@ "mailsplit": "5.4.0", "nodemailer": "6.9.8", "tlds": "1.248.0" + }, + "dependencies": { + "nodemailer": { + "version": "6.9.8", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.8.tgz", + "integrity": "sha512-cfrYUk16e67Ks051i4CntM9kshRYei1/o/Gi8K1d+R34OIs21xdFnW7Pt7EucmVKA0LKtqUGNcjMZ7ehjl49mQ==" + } } }, "mailsplit": { @@ -1849,9 +1856,9 @@ } }, "nodemailer": { - "version": "6.9.8", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.8.tgz", - "integrity": "sha512-cfrYUk16e67Ks051i4CntM9kshRYei1/o/Gi8K1d+R34OIs21xdFnW7Pt7EucmVKA0LKtqUGNcjMZ7ehjl49mQ==" + "version": "6.9.13", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.13.tgz", + "integrity": "sha512-7o38Yogx6krdoBf3jCAqnIN4oSQFx+fMa0I7dK1D+me9kBxx12D+/33wSb+fhOCtIxvYJ+4x4IMEhmhCKfAiOA==" }, "npm-run-path": { "version": "4.0.1", diff --git a/tests_smoke/README.md b/tests_smoke/README.md new file mode 100644 index 0000000000..531de7879a --- /dev/null +++ b/tests_smoke/README.md @@ -0,0 +1,14 @@ +# Smoke Tests + +This repository contains a set of smoke tests for our application. Smoke testing, also known as "Build Verification Testing", is a type of software testing that comprises a non-exhaustive set of tests aiming to ensure that the most important functions work. The phrase 'smoke testing' comes from hardware testing, where you plug in a new piece of hardware and turn it on for the first time. If it starts smoking, you know you have a problem. + +## Getting Started + +These smoke tests are designed to run in the api devcontainer. + +In the root of the repo, create `.env` files for the environments you wish to smoke test, for example `.env_smoke_local`, `.env_smoke_staging`, and `.env_smoke_prod`. For required values see the [.env.example](.env.example) file.
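A minimal sketch of such an env file, assuming the variable names read by `tests_smoke/smoke/common.py` later in this diff; every value below is a placeholder, except that the email address and phone number are the internal test defaults defined in that module:

SMOKE_SERVICE_ID=00000000-0000-0000-0000-000000000000
SMOKE_USER_ID=00000000-0000-0000-0000-000000000000
SMOKE_EMAIL_TEMPLATE_ID=00000000-0000-0000-0000-000000000000
SMOKE_SMS_TEMPLATE_ID=00000000-0000-0000-0000-000000000000
SMOKE_EMAIL_TO=internal.test@cds-snc.ca
SMOKE_SMS_TO=+16135550123
SMOKE_API_KEY=placeholder-api-key
SMOKE_JOB_SIZE=2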
+ +## Running the tests + +In the devcontainer, run the aliases `smoke-local`, `smoke-staging`, or `smoke-prod` to run the tests. + diff --git a/tests_smoke/send_many.py b/tests_smoke/send_many.py new file mode 100644 index 0000000000..62adca2ecd --- /dev/null +++ b/tests_smoke/send_many.py @@ -0,0 +1,72 @@ +import argparse +import time +from datetime import datetime + +import requests +from dotenv import load_dotenv +from smoke.common import ( # type: ignore + Config, + Notification_type, + create_jwt_token, + job_line, + rows_to_csv, + s3upload, + set_metadata_on_csv_upload, +) + +DEFAULT_JOB_SIZE = 50000 + + +def send_admin_csv(notification_type: Notification_type, job_size: int): + """Send a bulk job of notifications by uploading a CSV + + Args: + notification_type (Notification_type): email or sms + job_size (int): number of notifications to send + """ + + template_id = Config.EMAIL_TEMPLATE_ID if notification_type == Notification_type.EMAIL else Config.SMS_TEMPLATE_ID + to = Config.EMAIL_TO if notification_type == Notification_type.EMAIL else Config.SMS_TO + header = "email address" if notification_type == Notification_type.EMAIL else "phone number" + + csv = rows_to_csv([[header, "var"], *job_line(to, job_size)]) + upload_id = s3upload(Config.SERVICE_ID, csv) + metadata_kwargs = { + "notification_count": 1, + "template_id": template_id, + "valid": True, + "original_file_name": f"Large send {datetime.utcnow().isoformat()}.csv", + } + set_metadata_on_csv_upload(Config.SERVICE_ID, upload_id, **metadata_kwargs) + + token = create_jwt_token(Config.ADMIN_CLIENT_SECRET, client_id=Config.ADMIN_CLIENT_USER_NAME) + response = requests.post( + f"{Config.API_HOST_NAME}/service/{Config.SERVICE_ID}/job", + json={"id": upload_id, "created_by": Config.USER_ID}, + headers={"Authorization": f"Bearer {token}"}, + ) + if response.status_code != 201: + print(response.json()) + print("FAILED: post to start send failed") + exit(1) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-n", "--notifications", default=1, type=int, help="total number of notifications") + parser.add_argument("-j", "--job_size", default=DEFAULT_JOB_SIZE, type=int, help=f"size of bulk send jobs (default {DEFAULT_JOB_SIZE})") + parser.add_argument("--sms", default=False, action='store_true', help="send sms instead of emails") + + args = parser.parse_args() + load_dotenv() + + notification_type = Notification_type.SMS if args.sms else Notification_type.EMAIL + for start_n in range(0, args.notifications, args.job_size): + num_sending = min(args.notifications - start_n, args.job_size) + print(f"Sending {start_n} - {start_n + num_sending - 1} of {args.notifications}") + send_admin_csv(notification_type, num_sending) + time.sleep(1) + + +if __name__ == "__main__": + main() diff --git a/tests_smoke/smoke/common.py b/tests_smoke/smoke/common.py index 8c189baf19..f248bc8926 100644 --- a/tests_smoke/smoke/common.py +++ b/tests_smoke/smoke/common.py @@ -2,8 +2,6 @@ import json import os import time - -# from notifications_utils.s3 import s3upload as utils_s3upload import urllib import uuid from enum import Enum @@ -16,6 +14,10 @@ from dotenv import load_dotenv from notifications_python_client.authentication import create_jwt_token +# from app/config.py +INTERNAL_TEST_NUMBER = "+16135550123" +INTERNAL_TEST_EMAIL_ADDRESS = "internal.test@cds-snc.ca" + load_dotenv() @@ -32,11 +34,12 @@ class Config: AWS_SECRET_ACCESS_KEY = 
os.environ.get("SMOKE_AWS_SECRET_ACCESS_KEY") SERVICE_ID = os.environ.get("SMOKE_SERVICE_ID", "") USER_ID = os.environ.get("SMOKE_USER_ID") - EMAIL_TO = os.environ.get("SMOKE_EMAIL_TO", "") - SMS_TO = os.environ.get("SMOKE_SMS_TO", "") + EMAIL_TO = os.environ.get("SMOKE_EMAIL_TO", INTERNAL_TEST_EMAIL_ADDRESS) + SMS_TO = os.environ.get("SMOKE_SMS_TO", INTERNAL_TEST_NUMBER) EMAIL_TEMPLATE_ID = os.environ.get("SMOKE_EMAIL_TEMPLATE_ID") SMS_TEMPLATE_ID = os.environ.get("SMOKE_SMS_TEMPLATE_ID") API_KEY = os.environ.get("SMOKE_API_KEY", "") + JOB_SIZE = int(os.environ.get("SMOKE_JOB_SIZE", 2)) boto_session = Session( @@ -63,8 +66,8 @@ def rows_to_csv(rows: List[List[str]]): return output.getvalue() -def job_line(data: str, number_of_lines: int) -> Iterator[List[str]]: - return map(lambda n: [data, f"var{n}"], range(0, number_of_lines)) +def job_line(data: str, number_of_lines: int, prefix: str = "") -> Iterator[List[str]]: + return map(lambda n: [data, f"{prefix} {n}"], range(0, number_of_lines)) def pretty_print(data: Any): @@ -116,7 +119,6 @@ def job_succeeded(service_id: str, job_id: str) -> bool: return success -# from notifications_utils.s3 import s3upload as utils_s3upload def utils_s3upload(filedata, region, bucket_name, file_location, content_type="binary/octet-stream", tags=None): _s3 = boto_session.resource("s3") diff --git a/tests_smoke/smoke/test_admin_csv.py b/tests_smoke/smoke/test_admin_csv.py index fc2d035a0a..e6f962266f 100644 --- a/tests_smoke/smoke/test_admin_csv.py +++ b/tests_smoke/smoke/test_admin_csv.py @@ -13,13 +13,13 @@ ) -def test_admin_csv(notification_type: Notification_type): +def test_admin_csv(notification_type: Notification_type, local: bool = False): print(f"test_admin_csv ({notification_type.value})... ", end="", flush=True) if notification_type == Notification_type.EMAIL: - data = rows_to_csv([["email address", "var"], *job_line(Config.EMAIL_TO, 2)]) + data = rows_to_csv([["email address", "var"], *job_line(Config.EMAIL_TO, Config.JOB_SIZE, prefix="smoke test admin csv")]) else: - data = rows_to_csv([["phone number", "var"], *job_line(Config.SMS_TO, 2)]) + data = rows_to_csv([["phone number", "var"], *job_line(Config.SMS_TO, Config.JOB_SIZE, prefix="smoke test admin csv")]) upload_id = s3upload(Config.SERVICE_ID, data) metadata_kwargs = { @@ -42,8 +42,11 @@ def test_admin_csv(notification_type: Notification_type): print("FAILED: post to send_notification failed") exit(1) - success = job_succeeded(Config.SERVICE_ID, upload_id) - if not success: - print("FAILED: job didn't finish successfully") - exit(1) - print("Success") + if local: + print(f"Check manually for {Config.JOB_SIZE} {notification_type.value}s") + else: + success = job_succeeded(Config.SERVICE_ID, upload_id) + if not success: + print("FAILED: job didn't finish successfully") + exit(1) + print("Success") diff --git a/tests_smoke/smoke/test_admin_one_off.py b/tests_smoke/smoke/test_admin_one_off.py index 8d52ea55a6..faaee84c92 100644 --- a/tests_smoke/smoke/test_admin_one_off.py +++ b/tests_smoke/smoke/test_admin_one_off.py @@ -4,7 +4,7 @@ from .common import Config, Notification_type, pretty_print, single_succeeded -def test_admin_one_off(notification_type: Notification_type): +def test_admin_one_off(notification_type: Notification_type, local: bool = False): print(f"test_admin_one_off ({notification_type.value})... 
", end="", flush=True) token = create_jwt_token(Config.ADMIN_CLIENT_SECRET, client_id=Config.ADMIN_CLIENT_USER_NAME) @@ -17,7 +17,7 @@ def test_admin_one_off(notification_type: Notification_type): "to": to, "template_id": template_id, "created_by": Config.USER_ID, - "personalisation": {"var": "var"}, + "personalisation": {"var": "smoke test admin one off"}, }, headers={"Authorization": f"Bearer {token}"}, ) @@ -28,9 +28,12 @@ def test_admin_one_off(notification_type: Notification_type): print("FAILED: post to send_notification failed") exit(1) - uri = f"{Config.API_HOST_NAME}/service/{Config.SERVICE_ID}/notifications/{body['id']}" - success = single_succeeded(uri, use_jwt=True) - if not success: - print("FAILED: job didn't finish successfully") - exit(1) - print("Success") + if local: + print(f"Check manually for 1 {notification_type.value}") + else: + uri = f"{Config.API_HOST_NAME}/service/{Config.SERVICE_ID}/notifications/{body['id']}" + success = single_succeeded(uri, use_jwt=True) + if not success: + print("FAILED: job didn't finish successfully") + exit(1) + print("Success") diff --git a/tests_smoke/smoke/test_api_bulk.py b/tests_smoke/smoke/test_api_bulk.py index e31455589d..91897770cf 100644 --- a/tests_smoke/smoke/test_api_bulk.py +++ b/tests_smoke/smoke/test_api_bulk.py @@ -12,7 +12,7 @@ ) -def test_api_bulk(notification_type: Notification_type): +def test_api_bulk(notification_type: Notification_type, local: bool = False): print(f"test_api_bulk ({notification_type.value})... ", end="", flush=True) template_id = Config.EMAIL_TEMPLATE_ID if notification_type == Notification_type.EMAIL else Config.SMS_TEMPLATE_ID to = Config.EMAIL_TO if notification_type == Notification_type.EMAIL else Config.SMS_TO @@ -23,7 +23,7 @@ def test_api_bulk(notification_type: Notification_type): json={ "name": f"My bulk name {datetime.utcnow().isoformat()}", "template_id": template_id, - "csv": rows_to_csv([[header, "var"], *job_line(to, 2)]), + "csv": rows_to_csv([[header, "var"], *job_line(to, Config.JOB_SIZE, prefix="smoke test api bulk")]), }, headers={"Authorization": f"ApiKey-v1 {Config.API_KEY}"}, ) @@ -32,8 +32,11 @@ def test_api_bulk(notification_type: Notification_type): print("FAILED: post failed") exit(1) - success = job_succeeded(Config.SERVICE_ID, response.json()["data"]["id"]) - if not success: - print("FAILED: job didn't finish successfully") - exit(1) - print("Success") + if local: + print(f"Check manually for {Config.JOB_SIZE} {notification_type.value}s") + else: + success = job_succeeded(Config.SERVICE_ID, response.json()["data"]["id"]) + if not success: + print("FAILED: job didn't finish successfully") + exit(1) + print("Success") diff --git a/tests_smoke/smoke/test_api_one_off.py b/tests_smoke/smoke/test_api_one_off.py index 643192bb64..d4ab8e470a 100644 --- a/tests_smoke/smoke/test_api_one_off.py +++ b/tests_smoke/smoke/test_api_one_off.py @@ -11,7 +11,7 @@ ) -def test_api_one_off(notification_type: Notification_type, attachment_type: Attachment_type = Attachment_type.NONE): +def test_api_one_off(notification_type: Notification_type, attachment_type: Attachment_type = Attachment_type.NONE, local: bool = False): if attachment_type is Attachment_type.NONE: print(f"test_api_oneoff ({notification_type.value})... 
", end="", flush=True) else: @@ -51,7 +51,7 @@ def test_api_one_off(notification_type: Notification_type, attachment_type: Atta } else: data["personalisation"] = { - "var": "var", + "var": "smoke test api one off", } response = requests.post( @@ -64,10 +64,12 @@ def test_api_one_off(notification_type: Notification_type, attachment_type: Atta print(f"FAILED: post to v2/notifications/{notification_type.value} failed") exit(1) - uri = response.json()["uri"] - - success = single_succeeded(uri, use_jwt=False) - if not success: - print("FAILED: job didn't finish successfully") - exit(1) - print("Success") + if local: + print(f"Check manually for 1 {notification_type.value}") + else: + uri = response.json()["uri"] + success = single_succeeded(uri, use_jwt=False) + if not success: + print("FAILED: job didn't finish successfully") + exit(1) + print("Success") diff --git a/tests_smoke/smoke_test.py b/tests_smoke/smoke_test.py index 2b9fc2399b..ccde49ddef 100644 --- a/tests_smoke/smoke_test.py +++ b/tests_smoke/smoke_test.py @@ -1,3 +1,5 @@ +import argparse + from smoke.common import Attachment_type, Config, Notification_type # type: ignore from smoke.test_admin_csv import test_admin_csv # type: ignore from smoke.test_admin_one_off import test_admin_one_off # type: ignore @@ -5,15 +7,22 @@ from smoke.test_api_one_off import test_api_one_off # type: ignore if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("-l", "--local", default=False, action='store_true', help="run locally, do not check for delivery success (default false)") + parser.add_argument("--nofiles", default=False, action='store_true', help="do not send files (default false)") + args = parser.parse_args() + print("API Smoke test\n") for key in ["API_HOST_NAME", "SERVICE_ID", "EMAIL_TEMPLATE_ID", "SMS_TEMPLATE_ID", "EMAIL_TO", "SMS_TO"]: print(f"{key:>17}: {Config.__dict__[key]}") print("") for notification_type in [Notification_type.EMAIL, Notification_type.SMS]: - test_admin_one_off(notification_type) - test_admin_csv(notification_type) - test_api_one_off(notification_type) - test_api_bulk(notification_type) - test_api_one_off(Notification_type.EMAIL, Attachment_type.ATTACHED) - test_api_one_off(Notification_type.EMAIL, Attachment_type.LINK) + test_admin_one_off(notification_type, local=args.local) + test_admin_csv(notification_type, local=args.local) + test_api_one_off(notification_type, local=args.local) + test_api_bulk(notification_type, local=args.local) + + if not args.nofiles: + test_api_one_off(Notification_type.EMAIL, attachment_type=Attachment_type.ATTACHED, local=args.local) + test_api_one_off(Notification_type.EMAIL, attachment_type=Attachment_type.LINK, local=args.local)