diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 7d15566e7a..49963dcbfa 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,39 +1,69 @@ -FROM python:3.9 +FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.10@sha256:ef9cc483a593c95e1e83f2cf00b6a0e1ec7df43344416a41ccb3a88aef27beac -ARG USERNAME=vscode -ARG USER_UID=1000 -ARG USER_GID=$USER_UID +ARG KUBENS_VERSION="0.9.4" +ARG OCTANT_VERSION="0.25.1" +ENV POETRY_VERSION="1.7.1" +# Install packages RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - postgresql-client \ - && rm -rf /var/lib/apt/lists/* - -RUN apt-get update \ - && apt-get -y install --no-install-recommends apt-utils 2>&1 \ - && apt-get -y install git openssh-client less iproute2 procps lsb-release libsodium-dev \ - && groupadd --gid $USER_GID $USERNAME \ - && useradd -s /bin/bash --uid $USER_UID --gid $USER_GID -m $USERNAME \ - && apt-get install -y sudo \ - && apt-get -y install curl unzip net-tools emacs fd-find exa \ - && apt-get -y install manpages man-db tldr \ - && echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME\ - && chmod 0440 /etc/sudoers.d/$USERNAME \ + && apt-get -y install --no-install-recommends \ + apt-utils \ + postgresql-client \ + 2>&1 \ + && apt-get -y install \ + curl \ + dnsutils \ + emacs \ + exa \ + fd-find \ + git \ + iproute2 \ + less \ + libsodium-dev \ + lsb-release \ + man-db \ + manpages \ + net-tools \ + nodejs \ + npm \ + openssh-client \ + procps \ + sudo \ + tldr \ + unzip \ + vim \ + libgtk2.0-0 \ + libgtk-3-0 \ + libgbm-dev \ + libnotify-dev \ + libgconf-2-4 \ + libnss3 \ + libxss1 \ + libasound2 \ + libxtst6 \ + xauth \ + xvfb \ && apt-get autoremove -y \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* -RUN curl -O -J -L https://github.com/ryanoasis/nerd-fonts/releases/download/v2.1.0/FiraMono.zip \ - && unzip FiraMono.zip -d ~/.fonts \ - && rm FiraMono.zip \ - && fc-cache -fv +# Upgrade pip +RUN pip install --upgrade pip -RUN curl -fsSL https://starship.rs/install.sh | bash -s -- -y +# Install kubens +RUN git clone -b v${KUBENS_VERSION} --single-branch https://github.com/ahmetb/kubectx /opt/kubectx \ + && ln -s /opt/kubectx/kubectx /usr/local/bin/kubectx \ + && ln -s /opt/kubectx/kubens /usr/local/bin/kubens -COPY .devcontainer/scripts/notify-dev-entrypoint.sh /usr/local/bin/ +# Install Octant +RUN curl -Lo octant.tar.gz https://github.com/vmware-tanzu/octant/releases/download/v${OCTANT_VERSION}/octant_${OCTANT_VERSION}_Linux-64bit.tar.gz \ + && tar -xvf octant.tar.gz \ + && mv octant_${OCTANT_VERSION}_Linux-64bit/octant /usr/local/bin/ \ + && rm -rf octant_${OCTANT_VERSION}_Linux-64bit -EXPOSE 8000 +COPY .devcontainer/scripts/notify-dev-entrypoint.sh /usr/local/bin/ -RUN python -m pip install wheel +ENV SHELL /bin/zsh -RUN echo "eval '$(starship init bash)'" >> /root/.bashrc +EXPOSE 8000 +EXPOSE 6011 diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index d8ac6a0443..90716e9912 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,28 +1,65 @@ { - "name": "Python 3.9 & PostgreSQL", + "name": "notification-api", "dockerComposeFile": "docker-compose.yml", "service": "dev", "workspaceFolder": "/workspace", - - "settings": { - "python.linting.enabled": true, - "python.linting.pylintEnabled": true, - "python.linting.pylintPath": "/usr/local/bin/pylint", - "python.pythonPath": "/usr/local/bin/python", - "terminal.integrated.shell.linux": "/bin/bash", - "terminal.integrated.fontFamily": 
"FiraCode Nerd Font Mono" + "shutdownAction": "stopCompose", + "remoteEnv": { + "PATH": "/home/vscode/.local/bin:${containerEnv:PATH}" // give our installed Python modules precedence }, - - "extensions": [ - "donjayamanne.python-extension-pack", - "ms-azuretools.vscode-docker", - "ms-python.vscode-pylance", - "eamodio.gitlens", - "wholroyd.jinja", - "pmbenjamin.vscode-snyk", - "visualstudioexptteam.vscodeintellicode", - "yzhang.markdown-all-in-one", - "ms-ossdata.vscode-postgresql", - "googlecloudtools.cloudcode" - ] -} + "customizations": { + "vscode": { + "settings": { + "[python]": { + "editor.formatOnSave": true + }, + "python.formatting.blackPath": "/usr/local/bin/black", + "python.linting.enabled": true, + "python.linting.pylintEnabled": true, + "python.linting.pylintPath": "/usr/local/bin/pylint", + "python.pythonPath": "/usr/local/bin/python" + }, + "extensions": [ + "bungcip.better-toml", + "donjayamanne.python-extension-pack", + "eamodio.gitlens", + "GitHub.copilot", + "GitHub.copilot-labs", + "googlecloudtools.cloudcode", + "kaiwood.center-editor-window", + "matangover.mypy", + "ms-azuretools.vscode-docker", + "ms-ossdata.vscode-postgresql", + "ms-python.python", + "ms-python.vscode-pylance", + "ms-vsliveshare.vsliveshare", + "mtxr.sqltools", + "mtxr.sqltools-driver-pg", + "pmbenjamin.vscode-snyk", + "timonwong.shellcheck", + "usernamehw.errorlens", + "visualstudioexptteam.vscodeintellicode", + "wenfangdu.jump", + "wholroyd.jinja", + "yzhang.markdown-all-in-one" + ] + } + }, + "features": { + "docker-from-docker": { + "version": "latest", + "moby": true + }, + "kubectl-helm-minikube": { + "version": "latest", + "helm": "latest", + "minikube": "none" + }, + "ghcr.io/devcontainers/features/node:1": { + "version": "14.17.4" + } + }, + "postCreateCommand": "notify-dev-entrypoint.sh", + "remoteUser": "vscode", + +} \ No newline at end of file diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index ace50d049b..19b03edfa9 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -2,25 +2,27 @@ version: '3' services: dev: - build: + build: context: .. 
dockerfile: .devcontainer/Dockerfile - entrypoint: notify-dev-entrypoint.sh environment: SQLALCHEMY_DATABASE_URI: postgresql://postgres:chummy@db/notification_api + SQLALCHEMY_DATABASE_TEST_URI: postgresql://postgres:chummy@db/test_notification_api + REDIS_URL: redis://redis:6380 volumes: - - ..:/workspace:cached + - ..:/workspace:cached command: sleep infinity - ports: + ports: - 8000:8000 - 8001:8001 - links: + - 6011:6011 + links: - db db: - image: postgres:11.2 + image: postgres:11.22-bullseye@sha256:c886a3236b3d11abc302e64309186c90a69b49e53ccff23fd8c8b057b5b4bce9 volumes: - - ./initdb:/docker-entrypoint-initdb.d + - ./initdb:/docker-entrypoint-initdb.d restart: always command: - "postgres" @@ -31,6 +33,15 @@ services: POSTGRES_PASSWORD: chummy POSTGRES_HOST_AUTH_METHOD: trust expose: - - "5432" + - "5432" ports: - "5432:5432" + + redis: + image: redis:6.2@sha256:d4948d011cc38e94f0aafb8f9a60309bd93034e07d10e0767af534512cf012a9 + restart: always + command: redis-server --port 6380 + ports: + - "6380:6380" + expose: + - "6380" diff --git a/.devcontainer/scripts/notify-dev-entrypoint.sh b/.devcontainer/scripts/notify-dev-entrypoint.sh index 3819f89948..a2d1fa10de 100755 --- a/.devcontainer/scripts/notify-dev-entrypoint.sh +++ b/.devcontainer/scripts/notify-dev-entrypoint.sh @@ -1,39 +1,57 @@ #!/bin/bash -set -ex +set -ex ################################################################### -# This script will get executed *once* the Docker container has +# This script will get executed *once* the Docker container has # been built. Commands that need to be executed with all available -# tools and the filesystem mount enabled should be located here. +# tools and the filesystem mount enabled should be located here. ################################################################### -# We want to enable broadcast message which by default is disabled. -sed '/mesg/d' ~/.profile > ~/.profile.bak && mv ~/.profile.bak ~/.profile -echo -e "\ntest -t 0 && mesg n" >> ~/.profile - # Define aliases -echo -e "\n\n# User's Aliases" >> ~/.profile -echo -e "alias fd=fdfind" >> ~/.profile -echo -e "alias l='ls -al --color'" >> ~/.profile -echo -e "alias ls='exa'" >> ~/.profile -echo -e "alias l='exa -alh'" >> ~/.profile -echo -e "alias ll='exa -alh@ --git'" >> ~/.profile -echo -e "alias lt='exa -al -T -L 2'" >> ~/.profile - -cd /workspace - -# Warm up git index prior to display status in prompt else it will -# be quite slow on every invocation of starship. 
-git status +echo -e "\n\n# User's Aliases" >> ~/.zshrc +echo -e "alias fd=fdfind" >> ~/.zshrc +echo -e "alias l='ls -al --color'" >> ~/.zshrc +echo -e "alias ls='exa'" >> ~/.zshrc +echo -e "alias l='exa -alh'" >> ~/.zshrc +echo -e "alias ll='exa -alh@ --git'" >> ~/.zshrc +echo -e "alias lt='exa -al -T -L 2'" >> ~/.zshrc + +# Kubectl aliases and command autocomplete +echo -e "alias k='kubectl'" >> ~/.zshrc +echo -e "alias k-staging='aws eks --region ca-central-1 update-kubeconfig --name notification-canada-ca-staging-eks-cluster'" >> ~/.zshrc +echo -e "alias k-prod='aws eks --region ca-central-1 update-kubeconfig --name notification-canada-ca-production-eks-cluster'" >> ~/.zshrc +echo -e "source <(kubectl completion zsh)" >> ~/.zshrc +echo -e "complete -F __start_kubectl k" >> ~/.zshrc + +# Smoke test +# requires adding files .env_staging and .env_prod to the root of the project +echo -e "alias smoke-local='cd /workspace && cp .env_smoke_local tests_smoke/.env && poetry run make smoke-test-local'" >> ~/.zshrc +echo -e "alias smoke-staging='cd /workspace && cp .env_smoke_staging tests_smoke/.env && poetry run make smoke-test'" >> ~/.zshrc +echo -e "alias smoke-prod='cd /workspace && cp .env_smoke_prod tests_smoke/.env && poetry run make smoke-test'" >> ~/.zshrc + +cd /workspace + +# Poetry autocomplete +echo -e "fpath+=/.zfunc" >> ~/.zshrc +echo -e "autoload -Uz compinit && compinit" + +pip install poetry==${POETRY_VERSION} +export PATH=$PATH:/home/vscode/.local/bin/ +which poetry +poetry --version + +# Initialize poetry autocompletions +mkdir ~/.zfunc +touch ~/.zfunc/_poetry +poetry completions zsh > ~/.zfunc/_poetry make generate-version-file -pip3 install -r requirements.txt -pip3 install -r requirements_for_test.txt -# Upgrade schema of the notification_api database. -flask db upgrade +# Install dependencies +poetry install -wall "The dev container entrypoint setup is complete!" +# Upgrade schema of the notification_api database. +poetry run flask db upgrade -# Bubble up the main Docker command to container. -exec "$@" \ No newline at end of file +# install npm deps (i.e. cypress) +cd tests_cypress && npm install && npx cypress install && cd .. 
\ No newline at end of file diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..4f509e525f --- /dev/null +++ b/.dockerignore @@ -0,0 +1 @@ +*.env \ No newline at end of file diff --git a/.env.devcontainer b/.env.devcontainer new file mode 100644 index 0000000000..27fb527417 --- /dev/null +++ b/.env.devcontainer @@ -0,0 +1,8 @@ +NOTIFY_ENVIRONMENT=development + +REDIS_ENABLED="1" +REDIS_URL=redis://host.docker.internal:6380 + +FF_REDIS_BATCH_SAVING=true +FF_BATCH_INSERTION=true +FF_PRIORITY_LANES=true diff --git a/.env.enc.aws b/.env.enc.aws deleted file mode 100644 index 5c496d4640..0000000000 Binary files a/.env.enc.aws and /dev/null differ diff --git a/.env.example b/.env.example index e113c0d784..6557dd4a88 100644 --- a/.env.example +++ b/.env.example @@ -1,15 +1,10 @@ NOTIFY_ENVIRONMENT=development ADMIN_CLIENT_SECRET=dev-notify-secret-key +SRE_CLIENT_SECRET=dev-notify-secret-key SECRET_KEY=dev-notify-secret-key DANGEROUS_SALT=dev-notify-salt -MLWR_HOST='' -MLWR_USER='' -MLWR_KEY='' - -SENDGRID_API_KEY= - NOTIFICATION_QUEUE_PREFIX='notification-canada-ca' FLASK_APP=application.py @@ -21,3 +16,10 @@ AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= AWS_PINPOINT_REGION=us-west-2 +AWS_EMF_ENVIRONMENT=local + +CONTACT_FORM_EMAIL_ADDRESS = "" + +AWS_PINPOINT_SC_POOL_ID= +AWS_PINPOINT_SC_TEMPLATE_IDS= +AWS_PINPOINT_DEFAULT_POOL_ID= diff --git a/.github/workflows/backstage-catalog-helper.yml b/.github/workflows/backstage-catalog-helper.yml new file mode 100644 index 0000000000..2ed8456286 --- /dev/null +++ b/.github/workflows/backstage-catalog-helper.yml @@ -0,0 +1,37 @@ +name: Backstage Catalog Info Helper +on: + workflow_dispatch: + schedule: + - cron: "0 0 * * *" + +jobs: + update-catalog-info: + runs-on: ubuntu-latest + steps: + - name: Checkout Actions + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + with: + fetch-depth: 0 + - name: Run Backstage Catalog Info Helper + uses: cds-snc/backstage-catalog-info-helper-action@v0.3.1 + with: + github_app_id: ${{ secrets.SRE_BOT_RW_APP_ID }} + github_app_private_key: ${{ secrets.SRE_BOT_RW_PRIVATE_KEY }} + github_organization: cds-snc + - name: impersonate Read/Write GH App + uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a + id: generate_token + with: + app_id: ${{ secrets.SRE_BOT_RW_APP_ID }} + private_key: ${{ secrets.SRE_BOT_RW_PRIVATE_KEY }} + - name: Create pull request + uses: peter-evans/create-pull-request@v3 + with: + token: ${{ steps.generate_token.outputs.token}} + commit-message: 'Add catalog-info.yaml' + branch: 'backstage/catalog-info' + title: 'Add catalog-info.yaml' + body: 'Adding a basic catalog-info.yaml to start populating the backstage catalog with your components.' 
+ labels: 'backstage' + add-paths: | + catalog-info.yaml \ No newline at end of file diff --git a/.github/workflows/build_and_push_performance_test.yml b/.github/workflows/build_and_push_performance_test.yml index fde1493806..326cec3389 100644 --- a/.github/workflows/build_and_push_performance_test.yml +++ b/.github/workflows/build_and_push_performance_test.yml @@ -4,9 +4,10 @@ name: Build and Push Performance Testing Container to ECR, deploy to lambda on: workflow_dispatch: push: + branches: [ "main" ] paths: - - 'tests-perf/**' - - 'tests_smoke/**' + - 'tests-perf/locust/**' + - 'tests-perf/ops/**' env: GITHUB_SHA: ${{ github.sha }} @@ -19,9 +20,9 @@ jobs: images: ${{ steps.filter.outputs.changes }} steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - uses: dorny/paths-filter@v2 + - uses: dorny/paths-filter@7267a8516b6f92bdb098633497bad573efdbf271 # v2.12.0 id: filter with: filters: | @@ -40,7 +41,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Build container run: | @@ -52,7 +53,7 @@ jobs: - name: Configure AWS credentials id: aws-creds - uses: aws-actions/configure-aws-credentials@v1 + uses: aws-actions/configure-aws-credentials@5fd3084fc36e372ff1fff382a39b10d03659f355 # v2.2.0 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} @@ -60,7 +61,7 @@ jobs: - name: Login to ECR id: login-ecr - uses: aws-actions/amazon-ecr-login@v1 + uses: aws-actions/amazon-ecr-login@5a88a04c91d5c6f97aae0d9be790e64d9b1d47b7 # v1.7.1 - name: Push containers to ECR run: | @@ -69,3 +70,9 @@ jobs: - name: Logout of Amazon ECR run: docker logout ${{ steps.login-ecr.outputs.registry }} + + - name: Notify Slack channel if this job failed + if: ${{ failure() }} + run: | + json="{'text':' CI is failing in !'}" + curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_WEBHOOK }} diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000000..1027577cb7 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,41 @@ +name: "CodeQL" + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + schedule: + - cron: "29 4 * * 3" + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ python ] + + steps: + - name: Checkout + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + + - name: Initialize CodeQL + uses: github/codeql-action/init@ffd3158cb9024ebd018dbf20756f28befbd168c7 # v2.24.10 + with: + languages: ${{ matrix.language }} + queries: +security-and-quality + + - name: Autobuild + uses: github/codeql-action/autobuild@ffd3158cb9024ebd018dbf20756f28befbd168c7 # v2.24.10 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@ffd3158cb9024ebd018dbf20756f28befbd168c7 # v2.24.10 + with: + category: "/language:${{ matrix.language }}" diff --git a/.github/workflows/docker-vulnerability-scan.yml b/.github/workflows/docker-vulnerability-scan.yml new file mode 100644 index 0000000000..43b3c05ee5 --- /dev/null +++ b/.github/workflows/docker-vulnerability-scan.yml @@ -0,0 +1,75 @@ +name: Docker vulnerability scan + +on: + workflow_dispatch: + schedule: + - cron: "0 4 * * *" + +permissions: + id-token: write # This is required 
for requesting the OIDC JWT + contents: read # This is required for actions/checkout + security-events: write # This is required for the docker-scan action + +jobs: + docker-vulnerability-scan-k8s: + runs-on: ubuntu-latest + env: + DOCKERFILE_PATH: "ci/Dockerfile" + DOCKER_IMAGE: "public.ecr.aws/v6b8u5o6/notify-api" + + steps: + - name: Configure credentials to CDS public ECR using OIDC + uses: aws-actions/configure-aws-credentials@5fd3084fc36e372ff1fff382a39b10d03659f355 # v2.2.0 + with: + role-to-assume: arn:aws:iam::283582579564:role/notification-api-apply + role-session-name: NotifyApiGitHubActions + aws-region: "us-east-1" + + - name: Login to ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@5a88a04c91d5c6f97aae0d9be790e64d9b1d47b7 # v1.7.1 + with: + registry-type: public + + - name: Docker vulnerability scan + uses: cds-snc/security-tools/.github/actions/docker-scan@eecd7a02a0294b379411c126b61e5c29e253676a # v2.1.4 + with: + docker_image: "${{ env.DOCKER_IMAGE }}:latest" + dockerfile_path: "${{ env.DOCKERFILE_PATH }}" + token: "${{ secrets.GITHUB_TOKEN }}" + + - name: Logout of Amazon ECR + run: docker logout ${{ steps.login-ecr.outputs.registry }} + + docker-vulnerability-scan-lambda: + runs-on: ubuntu-latest + env: + DOCKERFILE_PATH: "ci/Dockerfile.lambda" + DOCKER_IMAGE: "${{ secrets.PRODUCTION_API_LAMBDA_ECR_ACCOUNT }}.dkr.ecr.ca-central-1.amazonaws.com/notify/api-lambda" + + steps: + - name: Configure credentials to Notify account using OIDC + uses: aws-actions/configure-aws-credentials@5fd3084fc36e372ff1fff382a39b10d03659f355 # v2.2.0 + with: + role-to-assume: arn:aws:iam::${{ secrets.PRODUCTION_API_LAMBDA_ECR_ACCOUNT }}:role/notification-api-apply + role-session-name: NotifyApiGitHubActions + aws-region: "ca-central-1" + + - name: Get latest Docker image tag + run: | + IMAGE_TAG="$(aws ecr describe-images --output json --repository-name notify/api-lambda --query 'sort_by(imageDetails,& imagePushedAt)[-1].imageTags[0]' | jq . 
--raw-output)" + echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_ENV + + - name: Login to ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@5a88a04c91d5c6f97aae0d9be790e64d9b1d47b7 # v1.7.1 + + - name: Docker vulnerability scan + uses: cds-snc/security-tools/.github/actions/docker-scan@eecd7a02a0294b379411c126b61e5c29e253676a # v2.1.4 + with: + docker_image: "${{ env.DOCKER_IMAGE }}:${{ env.IMAGE_TAG }}" + dockerfile_path: "${{ env.DOCKERFILE_PATH }}" + token: "${{ secrets.GITHUB_TOKEN }}" + + - name: Logout of Amazon ECR + run: docker logout ${{ steps.login-ecr.outputs.registry }} diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml index d45ca1c788..89d32b59b0 100644 --- a/.github/workflows/docker.yaml +++ b/.github/workflows/docker.yaml @@ -2,41 +2,46 @@ name: Build, push to AWS ECR, and deploy on: push: branches: - - master + - main env: AWS_REGION: ca-central-1 DOCKER_ORG: public.ecr.aws/v6b8u5o6 DOCKER_SLUG: public.ecr.aws/v6b8u5o6/notify-api - KUBECTL_VERSION: '1.18.0' + KUBECTL_VERSION: '1.23.6' + WORKFLOW_PAT: ${{ secrets.WORKFLOW_GITHUB_PAT }} + OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }} + +permissions: + id-token: write # This is required for requesting the OIDC JWT + contents: read # This is required for actions/checkout jobs: deploy: runs-on: ubuntu-latest name: Build and push steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Install AWS CLI run: | curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" unzip -q awscliv2.zip sudo ./aws/install --update aws --version - - name: Install kubectl - run: | - curl -LO https://storage.googleapis.com/kubernetes-release/release/v$KUBECTL_VERSION/bin/linux/amd64/kubectl - chmod +x ./kubectl - sudo mv ./kubectl /usr/local/bin/kubectl - kubectl version --client - mkdir -p $HOME/.kube - - name: AWS auth with ECR - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ECR_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ECR_SECRET_ACCESS_KEY }} - run: | - aws ecr-public get-login-password --region us-east-1 > /tmp/aws - cat /tmp/aws | docker login --username AWS --password-stdin $DOCKER_ORG - rm /tmp/aws + + - name: Configure credentials to CDS public ECR using OIDC + uses: aws-actions/configure-aws-credentials@master + with: + role-to-assume: arn:aws:iam::283582579564:role/notification-api-apply + role-session-name: NotifyApiGitHubActions + aws-region: "us-east-1" + + - name: Login to ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@5a88a04c91d5c6f97aae0d9be790e64d9b1d47b7 # v1.7.1 + with: + registry-type: public + - name: Build run: | docker pull $DOCKER_SLUG:latest @@ -46,38 +51,79 @@ jobs: -t $DOCKER_SLUG:${GITHUB_SHA::7} \ -t $DOCKER_SLUG:latest \ -f ci/Dockerfile . 
+ - name: Publish run: | docker push $DOCKER_SLUG:latest && docker push $DOCKER_SLUG:${GITHUB_SHA::7} - - name: Get Kubernetes configuration - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@5fd3084fc36e372ff1fff382a39b10d03659f355 # v2.2.0 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ca-central-1 + + - name: Install OpenVPN run: | - aws eks --region $AWS_REGION update-kubeconfig --name notification-canada-ca-staging-eks-cluster --kubeconfig $HOME/.kube/config + sudo apt update + sudo apt install -y openvpn openvpn-systemd-resolved + + - name: Install 1Pass CLI + run: | + curl -o 1pass.deb https://downloads.1password.com/linux/debian/amd64/stable/1password-cli-amd64-latest.deb + sudo dpkg -i 1pass.deb + + - name: One Password Fetch + run: | + op read op://4eyyuwddp6w4vxlabrr2i2duxm/"Staging Github Actions VPN"/notesPlain > /var/tmp/staging.ovpn + + - name: Connect to VPN + uses: "kota65535/github-openvpn-connect-action@cd2ed8a90cc7b060dc4e001143e811b5f7ea0af5" + with: + config_file: /var/tmp/staging.ovpn + client_key: ${{ secrets.STAGING_OVPN_CLIENT_KEY }} + echo_config: false + + - name: Configure kubeconfig + run: | + aws eks update-kubeconfig --name notification-canada-ca-staging-eks-cluster + - name: Update images in staging - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} run: | - kubectl set image deployment.apps/api api=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery celery=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-beat celery-beat=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config - kubectl set image deployment.apps/celery-sms celery-sms=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + DOCKER_TAG=${GITHUB_SHA::7} + kubectl set image deployment.apps/api api=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + kubectl set image deployment.apps/celery-beat celery-beat=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + kubectl set image deployment.apps/celery-sms celery-sms=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + kubectl set image deployment.apps/celery-primary celery-primary=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + kubectl set image deployment.apps/celery-scalable celery-scalable=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + kubectl set image deployment.apps/celery-sms-send-primary celery-sms-send-primary=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + kubectl set image deployment.apps/celery-sms-send-scalable celery-sms-send-scalable=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + kubectl set image deployment.apps/celery-email-send-primary celery-email-send-primary=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + kubectl set image deployment.apps/celery-email-send-scalable 
celery-email-send-scalable=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config + - name: my-app-install token id: notify-pr-bot - uses: getsentry/action-github-app-token@v1 + uses: getsentry/action-github-app-token@38a3ce582e170ddfe8789f509597c6944f2292a9 # v1.0.6 with: - app_id: ${{ secrets.PRBOT_APP_ID }} - private_key: ${{ secrets.PRBOT_PRIVATE_KEY }} + app_id: ${{ secrets.NOTIFY_PR_BOT_APP_ID }} + private_key: ${{ secrets.NOTIFY_PR_BOT_PRIVATE_KEY }} - - uses: cds-snc/notification-pr-bot@master + - uses: cds-snc/notification-pr-bot@main env: TOKEN: ${{ steps.notify-pr-bot.outputs.token }} + - name: Generate docker SBOM + uses: cds-snc/security-tools/.github/actions/generate-sbom@eecd7a02a0294b379411c126b61e5c29e253676a # v2.1.4 + with: + docker_image: "${{ env.DOCKER_SLUG }}:latest" + dockerfile_path: "ci/Dockerfile" + sbom_name: "notification-api" + token: "${{ secrets.GITHUB_TOKEN }}" + - name: Notify Slack channel if this job failed if: ${{ failure() }} run: | - json="{'text':' CI is failing in !'}" + json="{'text':' CI is failing in !'}" curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_WEBHOOK }} + diff --git a/.github/workflows/export_github_data.yml b/.github/workflows/export_github_data.yml new file mode 100644 index 0000000000..db92588487 --- /dev/null +++ b/.github/workflows/export_github_data.yml @@ -0,0 +1,25 @@ +name: GitHub repository metadata exporter +on: + workflow_dispatch: + schedule: + - cron: "20 7 * * *" + +jobs: + export-data: + runs-on: ubuntu-latest + steps: + - name: Audit DNS requests + uses: cds-snc/dns-proxy-action@main + env: + DNS_PROXY_FORWARDTOSENTINEL: "true" + DNS_PROXY_LOGANALYTICSWORKSPACEID: ${{ secrets.LOG_ANALYTICS_WORKSPACE_ID }} + DNS_PROXY_LOGANALYTICSSHAREDKEY: ${{ secrets.LOG_ANALYTICS_WORKSPACE_KEY }} + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Export Data + uses: cds-snc/github-repository-metadata-exporter@main + with: + github-app-id: ${{ secrets.SRE_BOT_RO_APP_ID }} + github-app-installation-id: ${{ secrets.SRE_BOT_RO_INSTALLATION_ID }} + github-app-private-key: ${{ secrets.SRE_BOT_RO_PRIVATE_KEY }} + log-analytics-workspace-id: ${{ secrets.LOG_ANALYTICS_WORKSPACE_ID }} + log-analytics-workspace-key: ${{ secrets.LOG_ANALYTICS_WORKSPACE_KEY }} diff --git a/.github/workflows/lambda_production.yml b/.github/workflows/lambda_production.yml index acde9b8f8e..9b3b7f2f11 100644 --- a/.github/workflows/lambda_production.yml +++ b/.github/workflows/lambda_production.yml @@ -4,7 +4,7 @@ name: Build and push lambda image to production on: workflow_dispatch: push: - branches: [master] + branches: [main] env: REGISTRY: ${{ secrets.PRODUCTION_API_LAMBDA_ECR_ACCOUNT }}.dkr.ecr.ca-central-1.amazonaws.com/notify @@ -19,31 +19,49 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v2 - - - name: Build container - run: | - docker build \ - --build-arg GIT_SHA=${GITHUB_SHA::7} \ - -t $REGISTRY/${{ matrix.image }}:${GITHUB_SHA::7} \ - . 
\ - -f ci/Dockerfile.lambda + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Configure AWS credentials id: aws-creds - uses: aws-actions/configure-aws-credentials@v1 + uses: aws-actions/configure-aws-credentials@5fd3084fc36e372ff1fff382a39b10d03659f355 # v2.2.0 with: aws-access-key-id: ${{ secrets.PRODUCTION_ECR_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.PRODUCTION_ECR_SECRET_ACCESS_KEY }} aws-region: ca-central-1 + - name: Set Docker image tag + run: | + echo "IMAGE_TAG=${GITHUB_SHA::7}" >> $GITHUB_ENV + + - name: Build container + run: | + docker build \ + --build-arg GIT_SHA=$IMAGE_TAG \ + -t $REGISTRY/${{ matrix.image }}:$IMAGE_TAG \ + . \ + -f ci/Dockerfile.lambda + - name: Login to ECR id: login-ecr - uses: aws-actions/amazon-ecr-login@v1 + uses: aws-actions/amazon-ecr-login@5a88a04c91d5c6f97aae0d9be790e64d9b1d47b7 # v1.7.1 - name: Push containers to ECR run: | - docker push $REGISTRY/${{ matrix.image }}:${GITHUB_SHA::7} + docker push $REGISTRY/${{ matrix.image }}:$IMAGE_TAG + + - name: Generate docker SBOM + uses: cds-snc/security-tools/.github/actions/generate-sbom@eecd7a02a0294b379411c126b61e5c29e253676a # v2.1.4 + with: + docker_image: "${{ env.REGISTRY }}/${{ matrix.image }}:${{ env.IMAGE_TAG }}" + dockerfile_path: "ci/Dockerfile.lambda" + sbom_name: "notification-api-lambda" + token: "${{ secrets.GITHUB_TOKEN }}" - name: Logout of Amazon ECR run: docker logout ${{ steps.login-ecr.outputs.registry }} + + - name: Notify Slack channel if this job failed + if: ${{ failure() }} + run: | + json="{'text':' CI is failing in !'}" + curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_WEBHOOK }} diff --git a/.github/workflows/lambda_staging.yml b/.github/workflows/lambda_staging.yml index f56327357a..9bd896bbe3 100644 --- a/.github/workflows/lambda_staging.yml +++ b/.github/workflows/lambda_staging.yml @@ -4,7 +4,7 @@ name: Build, push, and deploy lambda image to staging on: workflow_dispatch: push: - branches: [master] + branches: [main] env: REGISTRY: ${{ secrets.STAGING_API_LAMBDA_ECR_ACCOUNT }}.dkr.ecr.ca-central-1.amazonaws.com/notify @@ -19,7 +19,15 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + + - name: Configure AWS credentials + id: aws-creds + uses: aws-actions/configure-aws-credentials@5fd3084fc36e372ff1fff382a39b10d03659f355 # v2.2.0 + with: + aws-access-key-id: ${{ secrets.STAGING_ECR_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.STAGING_ECR_SECRET_ACCESS_KEY }} + aws-region: ca-central-1 - name: Build container run: | @@ -29,17 +37,9 @@ jobs: . 
\ -f ci/Dockerfile.lambda - - name: Configure AWS credentials - id: aws-creds - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.STAGING_ECR_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.STAGING_ECR_SECRET_ACCESS_KEY }} - aws-region: ca-central-1 - - name: Login to ECR id: login-ecr - uses: aws-actions/amazon-ecr-login@v1 + uses: aws-actions/amazon-ecr-login@5a88a04c91d5c6f97aae0d9be790e64d9b1d47b7 # v1.7.1 - name: Push containers to ECR run: | @@ -52,4 +52,20 @@ jobs: run: | aws lambda update-function-code \ --function-name ${{ matrix.image }} \ - --image-uri $REGISTRY/${{ matrix.image }}:${GITHUB_SHA::7} + --image-uri $REGISTRY/${{ matrix.image }}:${GITHUB_SHA::7} > /dev/null 2>&1 + + - name: Publish lambda version and update alias + run: | + aws lambda wait function-updated --function-name ${{ matrix.image }} + VERSION="$(aws lambda publish-version --function-name ${{ matrix.image }} | jq -r '.Version')" + + aws lambda update-alias \ + --function-name ${{ matrix.image }} \ + --name latest \ + --function-version "$VERSION" > /dev/null 2>&1 + + - name: Notify Slack channel if this job failed + if: ${{ failure() }} + run: | + json="{'text':' CI is failing in !'}" + curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_WEBHOOK }} diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml new file mode 100644 index 0000000000..bad8cdd45c --- /dev/null +++ b/.github/workflows/ossf-scorecard.yml @@ -0,0 +1,47 @@ +name: Scorecards supply-chain security +on: + workflow_dispatch: + schedule: + # Weekly on Saturdays. + - cron: "30 1 * * 6" + push: + branches: + - main + +permissions: read-all + +jobs: + analysis: + name: Scorecards analysis + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + + steps: + - name: "Checkout code" + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@8c9e2c1222f54716a1df7d7bbb245e2a045b4423 + with: + results_file: ossf-results.json + results_format: json + publish_results: false + + - name: "Add metadata" + run: | + full_repo="${{ github.repository }}" + OWNER=${full_repo%/*} + REPO=${full_repo#*/} + jq -c '. 
+ {"metadata_owner": "'$OWNER'", "metadata_repo": "'$REPO'", "metadata_query": "ossf"}' ossf-results.json > ossf-results-modified.json + + - name: "Post results to Sentinel" + uses: cds-snc/sentinel-forward-data-action@main + with: + file_name: ossf-results-modified.json + log_type: GitHubMetadata_OSSF_Scorecard + log_analytics_workspace_id: ${{ secrets.LOG_ANALYTICS_WORKSPACE_ID }} + log_analytics_workspace_key: ${{ secrets.LOG_ANALYTICS_WORKSPACE_KEY }} diff --git a/.github/workflows/performance.yml b/.github/workflows/performance.yml index e019b48043..8c52bdd979 100644 --- a/.github/workflows/performance.yml +++ b/.github/workflows/performance.yml @@ -9,23 +9,23 @@ jobs: steps: - name: Install libcurl run: sudo apt-get update && sudo apt-get install libssl-dev libcurl4-openssl-dev - - uses: actions/checkout@v2 - - name: Set up Python 3.9 - uses: actions/setup-python@v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + - name: Set up Python 3.10 + uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa # v4.8.0 with: - python-version: 3.9 + python-version: '3.10' - name: Upgrade pip run: python -m pip install --upgrade pip - - uses: actions/cache@v2 + - uses: actions/cache@e12d46a63a90f2fae62d114769bbf2a179198b5c # v3.3.3 with: path: ~/.cache/pip key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }} restore-keys: | ${{ runner.os }}-pip- - name: Run performance tests - run: /bin/bash -c "pip install -r requirements_for_test.txt && locust --headless --config tests-perf/locust/locust.conf -f tests-perf/locust/locust-notifications.py" + run: /bin/bash -c "poetry install --with test && locust --headless --config tests-perf/locust/locust.conf -f tests-perf/locust/locust-notifications.py" - name: Notify Slack channel if this performance test job fails - if: ${{ failure() && github.ref == 'refs/heads/master' }} + if: ${{ failure() && github.ref == 'refs/heads/main' }} run: | - json="{'text':'Scheduled CI Performance testing failed: '}" + json="{'text':'Scheduled CI Performance testing failed: '}" curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_WEBHOOK }} diff --git a/.github/workflows/s3-backup.yml b/.github/workflows/s3-backup.yml new file mode 100644 index 0000000000..6a8e9670d1 --- /dev/null +++ b/.github/workflows/s3-backup.yml @@ -0,0 +1,39 @@ +name: S3 backup +on: + workflow_dispatch: + schedule: + - cron: "0 6 * * *" + +jobs: + s3-backup: + runs-on: ubuntu-latest + steps: + + - name: Checkout + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + fetch-depth: 0 # retrieve all history + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 + with: + aws-access-key-id: ${{ secrets.AWS_S3_BACKUP_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_S3_BACKUP_SECRET_ACCESS_KEY }} + aws-region: ca-central-1 + + - name: Create ZIP bundle + run: | + ZIP_FILE=`basename ${{ github.repository }}`-`date '+%Y-%m-%d'`.zip + zip -rq "${ZIP_FILE}" . + mkdir -p ${{ github.repository }} + mv "${ZIP_FILE}" ${{ github.repository }} + + - name: Upload to S3 bucket + run: | + aws s3 sync . 
s3://${{ secrets.AWS_S3_BACKUP_BUCKET }} --exclude='*' --include='${{ github.repository }}/*' + + - name: Notify Slack channel if this job failed + if: ${{ failure() }} + run: | + json='{"text":"S3 backup failed in !"}' + curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_NOTIFY_WEBHOOK }} diff --git a/.github/workflows/scripts/run-shellcheck.sh b/.github/workflows/scripts/run-shellcheck.sh new file mode 100755 index 0000000000..0826c1b0d9 --- /dev/null +++ b/.github/workflows/scripts/run-shellcheck.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +docker run --rm -v "$PWD:/mnt" koalaman/shellcheck:v0.9.0 -P ./bin/ -x ./scripts/*.sh \ No newline at end of file diff --git a/.github/workflows/secret.yaml b/.github/workflows/secret.yaml index 4141579662..f859b60f3a 100644 --- a/.github/workflows/secret.yaml +++ b/.github/workflows/secret.yaml @@ -5,6 +5,6 @@ jobs: name: seekret-scanning runs-on: ubuntu-latest steps: - - uses: actions/checkout@master + - uses: actions/checkout@main - name: docker://cdssnc/seekret-github-action - uses: docker://cdssnc/seekret-github-action + uses: docker://cdssnc/seekret-github-action@sha256:0aee6df949373ef6df26d35f6207b56f897ddd1caa030646d7421b0afb717665 diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml new file mode 100644 index 0000000000..7dd912760c --- /dev/null +++ b/.github/workflows/shellcheck.yml @@ -0,0 +1,14 @@ +name: Shellcheck +on: + push: + paths: + - "**/*.sh" + +jobs: + shellcheck: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@ee0669bd1cc54295c223e0bb666b733df41de1c5 # v2.7.0 + - name: Shellcheck + run: | + .github/workflows/scripts/run-shellcheck.sh diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 529a2e0547..4f9a621187 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -6,7 +6,7 @@ jobs: runs-on: ubuntu-latest services: postgres: - image: postgres:13.4 + image: postgres:11.22-bullseye@sha256:c886a3236b3d11abc302e64309186c90a69b49e53ccff23fd8c8b057b5b4bce9 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres @@ -18,23 +18,64 @@ jobs: steps: - name: Install libcurl run: sudo apt-get update && sudo apt-get install libssl-dev libcurl4-openssl-dev - - uses: actions/checkout@v2 - - name: Set up Python 3.9 - uses: actions/setup-python@v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + - name: Set up Python 3.10 + uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa # v4.8.0 with: - python-version: 3.9 + python-version: '3.10' - name: Upgrade pip run: python -m pip install --upgrade pip - - uses: actions/cache@v2 + - uses: actions/cache@e12d46a63a90f2fae62d114769bbf2a179198b5c # v3.3.3 with: path: ~/.cache/pip key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }} restore-keys: | ${{ runner.os }}-pip- + - name: Install poetry + env: + POETRY_VERSION: "1.7.1" + run: pip install poetry==${POETRY_VERSION} && poetry --version + - name: Check poetry.lock aligns with pyproject.toml + run: poetry check --lock + - name: Install requirements + run: poetry install --with test - name: Run tests - run: /bin/bash -c "pip install -r requirements_for_test.txt && make test" + run: poetry run make test + - name: Upload pytest logs on failure + if: ${{ failure() }} + uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 + with: + name: pytest-logs + path: | + pytest*.log + - name: Get python version + run: | + python_version=$(python -V | cut -d' ' -f2) + echo 
"python_version=${python_version}" >> $GITHUB_ENV + - name: Make version file + run: | + printf '__commit_sha__ = "09cfe03100443fb9071bba88d5c8775ff54a9ebc"\n__time__ = "2022-07-25:15:11:05"\n' > version.py + cp version.py "${{ github.workspace }}/app/" + - name: Copy site-packages in workspace + working-directory: ${{ github.workspace }} + shell: bash + run: | + mkdir -p "${{ github.workspace }}/env/" && cp -fR $(poetry env list | poetry env info -p)/lib/python3.10/site-packages "${{ github.workspace }}/env/" + - name: Install development .env file + working-directory: ${{ github.workspace }} + shell: bash + run: | + cp -f .env.example .env + - name: Checks for new endpoints against AWS WAF rules + uses: cds-snc/notification-utils/.github/actions/waffles@52.2.2 + with: + app-loc: '/github/workspace' + app-libs: '/github/workspace/env/site-packages' + flask-mod: 'application' + flask-prop: 'application' + base-url: 'https://api.staging.notification.cdssandbox.xyz' - name: Notify Slack channel if this job fails - if: ${{ failure() && github.ref == 'refs/heads/master' }} + if: ${{ failure() && github.ref == 'refs/heads/main' }} run: | - json="{'text':'Scheduled CI testing failed: '}" + json="{'text':'Scheduled CI testing failed: '}" curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_WEBHOOK }} diff --git a/.gitignore b/.gitignore index 60efedb146..08663fb12a 100644 --- a/.gitignore +++ b/.gitignore @@ -28,6 +28,9 @@ var/ .installed.cfg *.egg /cache +newrelic-layer.zip +smoketest.sh +smoketest-prod.sh # PyInstaller # Usually these files are written by a python script from a template @@ -49,6 +52,7 @@ htmlcov/ coverage.xml test_results.xml *,cover +test_results_serial.xml # Translations *.mo @@ -63,21 +67,19 @@ docs/_build/ # PyBuilder target/ .idea/ -.vscode +.vscode/settings.json # Mac *.DS_Store environment.sh -.envrc -.env +.env* +!.env.devcontainer +!.env.example celerybeat-schedule celerybeat-schedule.* celerybeat.pid -# CloudFoundry -.cf - # VSCode .vscode .pythonenv @@ -85,3 +87,7 @@ celerybeat.pid # Misc /scripts/run_my_tests.sh jinja_templates/ + +cypress.env.json +node_modules/ +tests_cypress/cypress/videos/ diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000000..711f43a9c6 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,69 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Python: current file", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal" + }, + { + "name": "Python: Flask", + "type": "python", + "request": "launch", + "module": "flask", + "env": { + "FLASK_APP": "application.py", + "FLASK_ENV": "development" + }, + "args": [ + "run", + "--no-debugger", + "-p 6011", + "--host=0.0.0.0" + ], + "jinja": true, + "justMyCode": false + }, + { + "name": "Python: Celery", + "type": "python", + "request": "launch", + "module": "celery", + "console": "integratedTerminal", + "justMyCode": false, + "args": [ + "--app", + "run_celery", + "worker", + "--pidfile", + "/tmp/celery.pid", + "--concurrency=4", + "-l", + "DEBUG", + "-Q", + 
"database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,service-callbacks-retry,send-sms-tasks,send-sms-high,send-sms-medium,send-sms-low,send-throttled-sms-tasks,send-email-high,send-email-medium,send-email-low,send-email-tasks,service-callbacks,delivery-receipts", + ] + }, + { + "name": "Locust", + "type": "python", + "request": "launch", + "module": "locust", + "args": [ + "-f", + "${file}", + "-u", + "1", + "-r", + "1", + "-t", + "3", + "--headless" + ], + "gevent": true, + "console": "integratedTerminal" + } + ] +} \ No newline at end of file diff --git a/DATAFLOW.md b/DATAFLOW.md new file mode 100644 index 0000000000..1845e125cf --- /dev/null +++ b/DATAFLOW.md @@ -0,0 +1,105 @@ +# Data Flow + +These diagrams show the movement of notification data and the tasks that move it along. Most tasks are run asyncronously by adding the task to an SQS queue and having our celery workers pick it up. Some tasks (particularly ones handling errors) are run on a schedule. + +We look at emails here, but the flows for sms are similar (with "save_smss" replacing "save_emails", and so on). + +## POST to /email + +### Happy path + +```mermaid + sequenceDiagram + + participant internet + participant redis inbox + participant redis inflight + participant RDS + participant SES + + internet ->> redis inbox: POST /email + redis inbox ->> redis inflight: beat-inbox-* + redis inflight ->> RDS: save_emails + RDS ->> SES: deliver_email +``` + +### Error saving to database + +```mermaid + sequenceDiagram + + participant internet + participant redis inbox + participant redis inflight + participant RDS + + internet ->> redis inbox: POST /email + redis inbox ->> redis inflight: beat-inbox-* + redis inflight --x RDS: save_emails + + redis inflight ->> redis inbox: in-flight-to-inbox +``` + +### Error sending to SES + +```mermaid + sequenceDiagram + + participant redis inflight + participant RDS + participant SES + + redis inflight ->> RDS: save_emails + RDS --x SES: deliver_email + RDS ->> SES: replay-created-notifications, deliver_email +``` + +## POST to /bulk + +### Happy path + +```mermaid + sequenceDiagram + + participant internet + participant RDS + participant SES + + internet ->> RDS: POST /bulk (job) + RDS ->> RDS: process_job, save_emails (notifications) + RDS ->> SES: deliver_email + RDS ->> SES: deliver_email +``` + +### process_job interrupted + +```mermaid + sequenceDiagram + + participant internet + participant RDS + participant SES + + internet ->> RDS: POST /bulk (job) + RDS --x RDS: process_job, save_emails (notifications) + + RDS ->> RDS: check_job_status, process-incomplete-jobs, save_emails (notifications) + RDS ->> SES: deliver_email + RDS ->> SES: deliver_email +``` + +### Error sending to SES + +```mermaid + sequenceDiagram + + participant internet + participant RDS + participant SES + + internet ->> RDS: POST /bulk (job) + RDS ->> RDS: process_job, save_emails (notifications) + RDS ->> SES: deliver_email + RDS --x SES: deliver_email + RDS ->> SES: replay-created-notifications, deliver_email +``` \ No newline at end of file diff --git a/Makefile b/Makefile index e08b43ae5c..919458bf49 100644 --- a/Makefile +++ b/Makefile @@ -21,24 +21,11 @@ test: generate-version-file ## Run tests .PHONY: freeze-requirements freeze-requirements: - rm -rf venv-freeze - virtualenv -p python3 venv-freeze - # Make sure we are using the latest pip 
prior to requirements installation. - $$(pwd)/venv-freeze/bin/python3 -m pip install --upgrade pip - $$(pwd)/venv-freeze/bin/pip install -r requirements-app.txt --no-cache-dir - echo '# pyup: ignore file' > requirements.txt - echo '# This file is autogenerated. Do not edit it manually.' >> requirements.txt - cat requirements-app.txt >> requirements.txt - echo '' >> requirements.txt - $$(pwd)/venv-freeze/bin/pip freeze -r <(sed '/^--/d' requirements-app.txt) | sed -n '/The following requirements were added by pip freeze/,$$p' >> requirements.txt - rm -rf venv-freeze + poetry lock --no-update .PHONY: test-requirements test-requirements: - @diff requirements-app.txt requirements.txt | grep '<' \ - && { echo "requirements.txt doesn't match requirements-app.txt."; \ - echo "Run 'make freeze-requirements' to update."; exit 1; } \ -|| { echo "requirements.txt is up to date"; exit 0; } + poetry check --lock .PHONY: coverage coverage: venv ## Create coverage report @@ -50,9 +37,35 @@ clean: .PHONY: format format: - isort . - black --config pyproject.toml . + poetry run isort . + poetry run black --config pyproject.toml . + poetry run flake8 . + poetry run mypy . .PHONY: smoke-test smoke-test: - cd tests_smoke && python smoke_test.py + cd tests_smoke && poetry run python smoke_test.py + +.PHONY: smoke-test-local +smoke-test-local: + cd tests_smoke && poetry run python smoke_test.py --local --nofiles + +.PHONY: run +run: ## Run the web app + poetry run flask run -p 6011 --host=0.0.0.0 + +.PHONY: run-celery-local +run-celery-local: ## Run the celery workers with all the queues + poetry run ./scripts/run_celery_local.sh + +.PHONY: run-celery-local-filtered +run-celery-local-filtered: ## Run the celery workers with all queues but filter out common scheduled tasks + poetry run ./scripts/run_celery_local.sh 2>&1 >/dev/null | grep -iEv 'beat|in-flight-to-inbox|run-scheduled-jobs|check-job-status' + +.PHONY: run-celery-purge +run-celery-purge: ## Purge the celery queues + poetry run ./scripts/run_celery_purge.sh + +.PHONY: run-db +run-db: ## psql to access dev database + psql postgres://postgres:chummy@db:5432/notification_api \ No newline at end of file diff --git a/README.md b/README.md index e87e011a51..e36ade9a6e 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Contains: - the public-facing REST API for Notification built on the GOV.UK Notify platform, which teams can integrate with using [their clients](https://www.notifications.service.gov.uk/documentation) - an internal-only REST API built using Flask to manage services, users, templates, etc (this is what the [admin app](http://github.com/cds-snc/notification-admin) talks to) - asynchronous workers built using Celery to put things on queues and read them off to be processed, sent to providers, updated, etc - + ## Functional constraints @@ -14,118 +14,66 @@ Contains: ## Setting Up -For any issues during the following instructions, make sure to review the +For any issues during the following instructions, make sure to review the **Frequent problems** section toward the end of the document. -### Local installation instruction - -On OS X: - -1. Install PyEnv with Homebrew. This will preserve your sanity. - -`brew install pyenv` - -2. Install Python 3.9.1 or whatever is the latest - -`pyenv install 3.9.1` - -3. If you expect no conflicts, set `3.9.1` as you default - -`pyenv global 3.9.1` - -4. Ensure it installed by running - -`python --version` - -if it did not, take a look here: https://github.com/pyenv/pyenv/issues/660 - -5. 
Install `virtualenv`: - -`pip install virtualenvwrapper` - -6. Add the following to your shell rc file. ex: `.bashrc` or `.zshrc` - -``` -source ~/.pyenv/versions/3.9.1/bin/virtualenvwrapper.sh -``` - -7. Restart your terminal and make your virtual environtment: - -`mkvirtualenv -p ~/.pyenv/versions/3.9.1/bin/python notifications-api` - -8. You can now return to your environment any time by entering - -`workon notifications-api` - -9. Install [Postgres.app](http://postgresapp.com/). +### Local installation instruction (Use Dev Containers) +#### In a [VS Code devcontainer](https://code.visualstudio.com/docs/remote/containers-tutorial) -10. Create the database for the application +1. Install VS Code -`createdb --user=postgres notification_api` +`brew install --cask visual-studio-code` -11. Install the required environment variables via our LastPast Vault +2. Install Docker -Within the team's *LastPass Vault*, you should find corresponding folders for this -project containing the `.env` content that you should copy in your project root folder. This -will grant the application necessary access to our internal infrastructure. +`brew install --cask docker` -If you don't have access to our *LastPass Vault* (as you evaluate our notification -platform for example), you will find a sane set of defaults exists in the `.env.example` -file. Copy that file to `.env` and customize it to your needs. +3. Install the [Remote-Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) -12. Install all dependencies +4. In VS Code run "Remote-Containers: Open Folder in Container..." and select this repository folder -`pip3 install -r requirements.txt` +5. Run the service -13. Generate the version file ?!? +`make run` -`make generate-version-file` -14. Run all DB migrations +## To run the queues -`flask db upgrade` +Run `make run-celery-local` or `make run-celery-local-filtered`. Note that the "filtered" option does not show the beat worker logs nor most scheduled tasks (this makes it easier to trace notification sending). -15. Run the service - -`flask run -p 6011 --host=0.0.0.0` - -15a. To test - -`pip3 install -r requirements_for_test.txt` - -`make test` +### Python version +This codebase is Python 3 only. At the moment we run 3.10.8 in production. You will run into problems if you try to use Python 3.4 or older. +### To run Performance tests -## To run the queues +Ask your teamate for the following keys and add to .env ``` -scripts/run_celery.sh +PERF_TEST_AUTH_HEADER = +PERF_TEST_BULK_EMAIL_TEMPLATE_ID= +PERF_TEST_EMAIL_WITH_LINK_TEMPLATE_ID= +PERF_TEST_EMAIL_TEMPLATE_ID= +PERF_TEST_EMAIL_WITH_ATTACHMENT_TEMPLATE_ID= +PERF_TEST_SMS_TEMPLATE_ID = ``` +And run the performance tests using. We generally test with 3000 users every 20 seconds (but use your best judgement). ``` -scripts/run_celery_sms.sh +locust -f tests-perf/locust/locust-notifications.py ``` -``` -scripts/run_celery_beat.sh -``` - -### Python version - -This codebase is Python 3 only. At the moment we run 3.9.1 in production. You will run into problems if you try to use Python 3.4 or older. - ## To update application dependencies -`requirements.txt` file is generated from the `requirements-app.txt` in order to pin -versions of all nested dependencies. If `requirements-app.txt` has been changed (or -we want to update the unpinned nested dependencies) `requirements.txt` should be +`poetry.lock` file is generated from the `pyproject.toml` in order to pin +versions of all nested dependencies. 
If `pyproject.toml` has been changed (or +we want to update the unpinned nested dependencies) `poetry.lock` should be regenerated with ``` -make freeze-requirements +poetry lock --no-update ``` -`requirements.txt` should be committed alongside `requirements-app.txt` changes. +`poetry.lock` should be committed alongside `pyproject.toml` changes. ## Using Local Jinja for testing template changes @@ -145,9 +93,24 @@ Jinja templates are pulled in from the [notification-utils](https://github.com/c 6. Remove `USE_LOCAL_JINJA_TEMPLATES=True` from your .env file, and delete any jinja in `jinja_templates`. Deleting the folder and jinja files is not required, but recommended. Make sure you're pulling up-to-date jinja from notification-utils the next time you need to make changes. +## Testing + +To help debug full code paths of emails and SMS, we have a special email and phone number +set in the application's configuration. As it stands at the moment these are the following: + +| Notification Type | Test destination | +| ----------------- | ------------------------ | +| Email | internal.test@cds-snc.ca | +| SMS | +16135550123 | + +Whereas the smoke test emails and long codes might not get through the whole GCNotify +data treatment, these will and have proper database fields populated. This is useful +for proper stress tests where the notifications shouldn't merely touch the API +front-door but also get through the Celery workers processing. + ## Frequent problems -__Problem__: No *postgres* role exists. +__Problem__: No *postgres* role exists. __Solution__: If the command complains you don't have a *postgres* role existing, execute the following command and retry the above afterward: @@ -158,18 +121,7 @@ createuser -l -s postgres __Problem__ : `E999 SyntaxError: invalid syntax` when running `flake8` -__Solution__ : Check that you are in your correct virtualenv, with python 3.9 - ---- - -__Problem__: -``` -/bin/sh: 1: Syntax error: "(" unexpected -make: *** [Makefile:31: freeze-requirements] Error 2 -``` -when running `make freeze-requirements` - -__Solution__: Change `/bin/sh` to `/bin/bash` in the `Makefile` +__Solution__ : Check that you are in your correct virtualenv, with python 3.10 --- @@ -187,12 +139,11 @@ __Solution__: Do not specify a database in your `.env` __Problem__: `sqlalchemy.exc.OperationalError: (psycopg2.OperationalError) fe_sendauth: no password supplied` -__Solution__: Ensure `SQLALCHEMY_DATABASE_URI` supplied in pytest.ini or your `.env` file is valid to your +__Solution__: Ensure `SQLALCHEMY_DATABASE_URI` supplied in pytest.ini or your `.env` file is valid to your local database with user access, (pytest.ini takes precedence) --- __Problem__: Messages are in the queue but not sending -__Solution__: Check that `celery` is running. - +__Solution__: Check that `celery` is running. 
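The README and Makefile changes above move dependency management from pip-compiled requirements files to Poetry. A minimal sketch of the day-to-day flow those targets imply, assuming Poetry 1.7 as pinned in the Dockerfile (the package name `example-package` is a placeholder, not part of this diff):

```
# Add or bump a dependency (updates pyproject.toml and poetry.lock)
poetry add example-package

# Regenerate the lock without bumping unrelated pins (this is what `make freeze-requirements` now runs)
poetry lock --no-update

# Install the project plus the test dependency group (used by CI and `make test`)
poetry install --with test

# Verify poetry.lock still matches pyproject.toml (what `make test-requirements` and the test workflow run)
poetry check --lock
```

Both `pyproject.toml` and the regenerated `poetry.lock` are then committed together, as the README notes.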
diff --git a/app/__init__.py b/app/__init__.py index 83ddcef847..6685e5df44 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -4,18 +4,22 @@ import string import uuid from time import monotonic +from typing import Any from dotenv import load_dotenv -from flask import _request_ctx_stack, g, jsonify, make_response, request # type: ignore +from flask import g, jsonify, make_response, request # type: ignore from flask_marshmallow import Marshmallow from flask_migrate import Migrate +from flask_redis import FlaskRedis from notifications_utils import logging, request_helper +from notifications_utils.clients.redis.bounce_rate import RedisBounceRate from notifications_utils.clients.redis.redis_client import RedisClient from notifications_utils.clients.statsd.statsd_client import StatsdClient from notifications_utils.clients.zendesk.zendesk_client import ZendeskClient from werkzeug.exceptions import HTTPException as WerkzeugHTTPException from werkzeug.local import LocalProxy +from app.aws.metrics_logger import MetricsLogger from app.celery.celery import NotifyCelery from app.clients import Clients from app.clients.document_download import DocumentDownloadClient @@ -23,62 +27,132 @@ from app.clients.performance_platform.performance_platform_client import ( PerformancePlatformClient, ) +from app.clients.salesforce.salesforce_client import SalesforceClient +from app.clients.sms.aws_pinpoint import AwsPinpointClient from app.clients.sms.aws_sns import AwsSnsClient from app.dbsetup import RoutingSQLAlchemy -from app.encryption import Encryption +from app.encryption import CryptoSigner +from app.json_provider import NotifyJSONProvider +from app.queue import RedisQueue DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" DATE_FORMAT = "%Y-%m-%d" load_dotenv() -db = RoutingSQLAlchemy() +db: RoutingSQLAlchemy = RoutingSQLAlchemy() migrate = Migrate() -ma = Marshmallow() +marshmallow = Marshmallow() notify_celery = NotifyCelery() aws_ses_client = AwsSesClient() aws_sns_client = AwsSnsClient() -encryption = Encryption() +aws_pinpoint_client = AwsPinpointClient() +signer_notification = CryptoSigner() +signer_personalisation = CryptoSigner() +signer_complaint = CryptoSigner() +signer_delivery_status = CryptoSigner() +signer_bearer_token = CryptoSigner() +signer_api_key = CryptoSigner() +signer_inbound_sms = CryptoSigner() zendesk_client = ZendeskClient() statsd_client = StatsdClient() +flask_redis = FlaskRedis() +flask_redis_publish = FlaskRedis(config_prefix="REDIS_PUBLISH") redis_store = RedisClient() +bounce_rate_client = RedisBounceRate(redis_store) +metrics_logger = MetricsLogger() +# TODO: Rework instantiation to decouple redis_store.redis_store and pass it in.\ +email_queue = RedisQueue("email") +sms_queue = RedisQueue("sms") performance_platform_client = PerformancePlatformClient() document_download_client = DocumentDownloadClient() +salesforce_client = SalesforceClient() clients = Clients() -api_user = LocalProxy(lambda: _request_ctx_stack.top.api_user) -authenticated_service = LocalProxy(lambda: _request_ctx_stack.top.authenticated_service) +api_user: Any = LocalProxy(lambda: g.api_user) +authenticated_service: Any = LocalProxy(lambda: g.authenticated_service) +sms_bulk = RedisQueue("sms", process_type="bulk") +sms_normal = RedisQueue("sms", process_type="normal") +sms_priority = RedisQueue("sms", process_type="priority") +email_bulk = RedisQueue("email", process_type="bulk") +email_normal = RedisQueue("email", process_type="normal") +email_priority = RedisQueue("email", process_type="priority") + 
+sms_bulk_publish = RedisQueue("sms", process_type="bulk") +sms_normal_publish = RedisQueue("sms", process_type="normal") +sms_priority_publish = RedisQueue("sms", process_type="priority") +email_bulk_publish = RedisQueue("email", process_type="bulk") +email_normal_publish = RedisQueue("email", process_type="normal") +email_priority_publish = RedisQueue("email", process_type="priority") -def create_app(application): - from app.config import configs - notify_environment = os.getenv("NOTIFY_ENVIRONMENT", "development") +def create_app(application, config=None): + from app.config import configs - application.config.from_object(configs[notify_environment]) + if config is None: + notify_environment = os.getenv("NOTIFY_ENVIRONMENT", "development") + config = configs[notify_environment] + application.config.from_object(configs[notify_environment]) + else: + application.config.from_object(config) application.config["NOTIFY_APP_NAME"] = application.name init_app(application) + application.json = NotifyJSONProvider(application) request_helper.init_app(application) db.init_app(application) migrate.init_app(application, db=db) - ma.init_app(application) + marshmallow.init_app(application) zendesk_client.init_app(application) statsd_client.init_app(application) logging.init_app(application, statsd_client) aws_sns_client.init_app(application, statsd_client=statsd_client) + aws_pinpoint_client.init_app(application, statsd_client=statsd_client) aws_ses_client.init_app(application.config["AWS_REGION"], statsd_client=statsd_client) notify_celery.init_app(application) - encryption.init_app(application) - redis_store.init_app(application) + + signer_notification.init_app(application, secret_key=application.config["SECRET_KEY"], salt="notification") + signer_personalisation.init_app(application, secret_key=application.config["SECRET_KEY"], salt="personalisation") + signer_complaint.init_app(application, secret_key=application.config["SECRET_KEY"], salt="complaint") + signer_delivery_status.init_app(application, secret_key=application.config["SECRET_KEY"], salt="delivery_status") + signer_bearer_token.init_app(application, secret_key=application.config["SECRET_KEY"], salt="bearer_token") + signer_api_key.init_app(application, secret_key=application.config["SECRET_KEY"], salt="api_key") + signer_inbound_sms.init_app(application, secret_key=application.config["SECRET_KEY"], salt="inbound_sms") + performance_platform_client.init_app(application) document_download_client.init_app(application) - clients.init_app(sms_clients=[aws_sns_client], email_clients=[aws_ses_client]) + clients.init_app(sms_clients=[aws_sns_client, aws_pinpoint_client], email_clients=[aws_ses_client]) + + if application.config["FF_SALESFORCE_CONTACT"]: + salesforce_client.init_app(application) + + flask_redis.init_app(application) + flask_redis_publish.init_app(application) + redis_store.init_app(application) + bounce_rate_client.init_app(application) + + sms_bulk_publish.init_app(flask_redis_publish, metrics_logger) + sms_normal_publish.init_app(flask_redis_publish, metrics_logger) + sms_priority_publish.init_app(flask_redis_publish, metrics_logger) + email_bulk_publish.init_app(flask_redis_publish, metrics_logger) + email_normal_publish.init_app(flask_redis_publish, metrics_logger) + email_priority_publish.init_app(flask_redis_publish, metrics_logger) + + sms_bulk.init_app(flask_redis, metrics_logger) + sms_normal.init_app(flask_redis, metrics_logger) + sms_priority.init_app(flask_redis, metrics_logger) + email_bulk.init_app(flask_redis, 
metrics_logger) + email_normal.init_app(flask_redis, metrics_logger) + email_priority.init_app(flask_redis, metrics_logger) register_blueprint(application) register_v2_blueprints(application) + # Log the application configuration + application.logger.info(f"Notify config: {config.get_safe_config()}") + # avoid circular imports by importing this file later from app.commands import setup_commands @@ -87,13 +161,23 @@ def create_app(application): return application +def register_notify_blueprint(application, blueprint, auth_function, prefix=None): + if not blueprint._got_registered_once: + blueprint.before_request(auth_function) + if prefix: + application.register_blueprint(blueprint, url_prefix=prefix) + else: + application.register_blueprint(blueprint) + + def register_blueprint(application): from app.accept_invite.rest import accept_invite - from app.api_key.rest import api_key_blueprint + from app.api_key.rest import api_key_blueprint, sre_tools_blueprint from app.authentication.auth import ( requires_admin_auth, requires_auth, requires_no_auth, + requires_sre_auth, ) from app.billing.rest import billing_blueprint from app.complaint.complaint_rest import complaint_blueprint @@ -118,86 +202,66 @@ def register_blueprint(application): from app.status.healthcheck import status as status_blueprint from app.support.rest import support_blueprint from app.template.rest import template_blueprint + from app.template.template_category_rest import template_category_blueprint from app.template_folder.rest import template_folder_blueprint from app.template_statistics.rest import ( template_statistics as template_statistics_blueprint, ) from app.user.rest import user_blueprint - service_blueprint.before_request(requires_admin_auth) - application.register_blueprint(service_blueprint, url_prefix="/service") + register_notify_blueprint(application, service_blueprint, requires_admin_auth, "/service") - user_blueprint.before_request(requires_admin_auth) - application.register_blueprint(user_blueprint, url_prefix="/user") + register_notify_blueprint(application, user_blueprint, requires_admin_auth, "/user") - template_blueprint.before_request(requires_admin_auth) - application.register_blueprint(template_blueprint) + register_notify_blueprint(application, template_blueprint, requires_admin_auth) - status_blueprint.before_request(requires_no_auth) - application.register_blueprint(status_blueprint) + register_notify_blueprint(application, status_blueprint, requires_no_auth) - notifications_blueprint.before_request(requires_auth) - application.register_blueprint(notifications_blueprint) + register_notify_blueprint(application, notifications_blueprint, requires_auth) - job_blueprint.before_request(requires_admin_auth) - application.register_blueprint(job_blueprint) + register_notify_blueprint(application, job_blueprint, requires_admin_auth) - invite_blueprint.before_request(requires_admin_auth) - application.register_blueprint(invite_blueprint) + register_notify_blueprint(application, invite_blueprint, requires_admin_auth) - inbound_number_blueprint.before_request(requires_admin_auth) - application.register_blueprint(inbound_number_blueprint) + register_notify_blueprint(application, inbound_number_blueprint, requires_admin_auth) - inbound_sms_blueprint.before_request(requires_admin_auth) - application.register_blueprint(inbound_sms_blueprint) + register_notify_blueprint(application, inbound_sms_blueprint, requires_admin_auth) - accept_invite.before_request(requires_admin_auth) - 
application.register_blueprint(accept_invite, url_prefix="/invite") + register_notify_blueprint(application, accept_invite, requires_admin_auth, "/invite") - template_statistics_blueprint.before_request(requires_admin_auth) - application.register_blueprint(template_statistics_blueprint) + register_notify_blueprint(application, template_statistics_blueprint, requires_admin_auth) - events_blueprint.before_request(requires_admin_auth) - application.register_blueprint(events_blueprint) + register_notify_blueprint(application, events_blueprint, requires_admin_auth) - provider_details_blueprint.before_request(requires_admin_auth) - application.register_blueprint(provider_details_blueprint, url_prefix="/provider-details") + register_notify_blueprint(application, provider_details_blueprint, requires_admin_auth, "/provider-details") - email_branding_blueprint.before_request(requires_admin_auth) - application.register_blueprint(email_branding_blueprint, url_prefix="/email-branding") + register_notify_blueprint(application, email_branding_blueprint, requires_admin_auth, "/email-branding") - api_key_blueprint.before_request(requires_admin_auth) - application.register_blueprint(api_key_blueprint, url_prefix="/api-key") + register_notify_blueprint(application, api_key_blueprint, requires_admin_auth, "/api-key") - letter_job.before_request(requires_admin_auth) - application.register_blueprint(letter_job) + register_notify_blueprint(application, sre_tools_blueprint, requires_sre_auth, "/sre-tools") - letter_callback_blueprint.before_request(requires_no_auth) - application.register_blueprint(letter_callback_blueprint) + register_notify_blueprint(application, letter_job, requires_admin_auth) - billing_blueprint.before_request(requires_admin_auth) - application.register_blueprint(billing_blueprint) + register_notify_blueprint(application, letter_callback_blueprint, requires_no_auth) - service_callback_blueprint.before_request(requires_admin_auth) - application.register_blueprint(service_callback_blueprint) + register_notify_blueprint(application, billing_blueprint, requires_admin_auth) - organisation_blueprint.before_request(requires_admin_auth) - application.register_blueprint(organisation_blueprint, url_prefix="/organisations") + register_notify_blueprint(application, service_callback_blueprint, requires_admin_auth) - organisation_invite_blueprint.before_request(requires_admin_auth) - application.register_blueprint(organisation_invite_blueprint) + register_notify_blueprint(application, organisation_blueprint, requires_admin_auth, "/organisations") - complaint_blueprint.before_request(requires_admin_auth) - application.register_blueprint(complaint_blueprint) + register_notify_blueprint(application, organisation_invite_blueprint, requires_admin_auth) - platform_stats_blueprint.before_request(requires_admin_auth) - application.register_blueprint(platform_stats_blueprint, url_prefix="/platform-stats") + register_notify_blueprint(application, complaint_blueprint, requires_admin_auth) - template_folder_blueprint.before_request(requires_admin_auth) - application.register_blueprint(template_folder_blueprint) + register_notify_blueprint(application, platform_stats_blueprint, requires_admin_auth, "/platform-stats") - letter_branding_blueprint.before_request(requires_admin_auth) - application.register_blueprint(letter_branding_blueprint) + register_notify_blueprint(application, template_folder_blueprint, requires_admin_auth) + + register_notify_blueprint(application, letter_branding_blueprint, requires_admin_auth) + 
+ register_notify_blueprint(application, template_category_blueprint, requires_admin_auth) support_blueprint.before_request(requires_admin_auth) application.register_blueprint(support_blueprint, url_prefix="/support") @@ -208,33 +272,25 @@ def register_v2_blueprints(application): from app.v2.inbound_sms.get_inbound_sms import ( v2_inbound_sms_blueprint as get_inbound_sms, ) - from app.v2.notifications.get_notifications import ( - v2_notification_blueprint as get_notifications, + from app.v2.notifications import ( # noqa + get_notifications, + post_notifications, + v2_notification_blueprint, ) - from app.v2.notifications.post_notifications import ( - v2_notification_blueprint as post_notifications, + from app.v2.template import ( # noqa + get_template, + post_template, + v2_template_blueprint, ) - from app.v2.template.get_template import v2_template_blueprint as get_template - from app.v2.template.post_template import v2_template_blueprint as post_template from app.v2.templates.get_templates import v2_templates_blueprint as get_templates - post_notifications.before_request(requires_auth) - application.register_blueprint(post_notifications) - - get_notifications.before_request(requires_auth) - application.register_blueprint(get_notifications) - - get_templates.before_request(requires_auth) - application.register_blueprint(get_templates) + register_notify_blueprint(application, v2_notification_blueprint, requires_auth) - get_template.before_request(requires_auth) - application.register_blueprint(get_template) + register_notify_blueprint(application, get_templates, requires_auth) - post_template.before_request(requires_auth) - application.register_blueprint(post_template) + register_notify_blueprint(application, v2_template_blueprint, requires_auth) - get_inbound_sms.before_request(requires_auth) - application.register_blueprint(get_inbound_sms) + register_notify_blueprint(application, get_inbound_sms, requires_auth) def init_app(app): diff --git a/app/accept_invite/rest.py b/app/accept_invite/rest.py index 8c906cae90..8c18066cc0 100644 --- a/app/accept_invite/rest.py +++ b/app/accept_invite/rest.py @@ -13,16 +13,10 @@ @accept_invite.route("//", methods=["GET"]) def validate_invitation_token(invitation_type, token): - max_age_seconds = 60 * 60 * 24 * current_app.config["INVITATION_EXPIRATION_DAYS"] try: - invited_user_id = check_token( - token, - current_app.config["SECRET_KEY"], - current_app.config["DANGEROUS_SALT"], - max_age_seconds, - ) + invited_user_id = check_token(token=token, secret=current_app.config["SECRET_KEY"], max_age_seconds=max_age_seconds) except SignatureExpired: errors = {"invitation": "invitation expired"} raise InvalidRequest(errors, status_code=400) @@ -32,7 +26,7 @@ def validate_invitation_token(invitation_type, token): if invitation_type == "service": invited_user = get_invited_user_by_id(invited_user_id) - return jsonify(data=invited_user_schema.dump(invited_user).data), 200 + return jsonify(data=invited_user_schema.dump(invited_user)), 200 elif invitation_type == "organisation": invited_user = dao_get_invited_organisation_user(invited_user_id) return jsonify(data=invited_user.serialize()), 200 diff --git a/app/api_key/rest.py b/app/api_key/rest.py index 376f3ed057..1aa6b28fd9 100644 --- a/app/api_key/rest.py +++ b/app/api_key/rest.py @@ -1,16 +1,29 @@ -from flask import Blueprint, jsonify +from datetime import datetime + +import werkzeug +from flask import Blueprint, current_app, jsonify, request from app import DATETIME_FORMAT +from app.dao.api_key_dao import ( + 
expire_api_key, + get_api_key_by_secret, + update_compromised_api_key_info, +) from app.dao.fact_notification_status_dao import ( get_api_key_ranked_by_notifications_created, get_last_send_for_api_key, get_total_notifications_sent_for_api_key, ) -from app.errors import register_errors +from app.dao.services_dao import dao_fetch_service_by_id +from app.errors import InvalidRequest, register_errors +from app.service.sender import send_notification_to_service_users api_key_blueprint = Blueprint("api_key", __name__) register_errors(api_key_blueprint) +sre_tools_blueprint = Blueprint("sre_tools", __name__) +register_errors(sre_tools_blueprint) + @api_key_blueprint.route("//summary-statistics", methods=["GET"]) def get_api_key_stats(api_key_id): @@ -59,3 +72,83 @@ def get_api_keys_ranked(n_days_back): } ) return jsonify(data=data) + + +def send_api_key_revocation_email(service_id, api_key_name, api_key_information): + service = dao_fetch_service_by_id(service_id) + send_notification_to_service_users( + service_id=service_id, + template_id=current_app.config["APIKEY_REVOKE_TEMPLATE_ID"], + personalisation={ + "service_name": service.name, + "public_location": api_key_information["url"], + "key_name": api_key_name, + }, + include_user_fields=["name"], + ) + + +@sre_tools_blueprint.route("/api-key-revoke", methods=["POST"]) +def revoke_api_keys(): + """ + This method accepts a single api key and revokes it. The data is of the form: + { + "token": "gcntfy-key-name-uuid-uuid", + "type": "mycompany_api_token", + "url": "https://github.com/octocat/Hello-World/blob/12345600b9cbe38a219f39a9941c9319b600c002/foo/bar.txt", + "source": "content", + } + + The function does 4 things: + 1. Finds the api key by API key itself + 2. Revokes the API key + 3. Saves the source and url into the compromised_key_info field + 4. 
TODO: Sends the service owners of the api key an email notification indicating that the key has been revoked + """ + try: + api_key_data = request.get_json() + # check for correct payload + if ( + isinstance(api_key_data, list) + or api_key_data.get("token") is None + or api_key_data.get("type") is None + or api_key_data.get("url") is None + or api_key_data.get("source") is None + ): + raise InvalidRequest("Invalid payload", status_code=400) + except werkzeug.exceptions.BadRequest as errors: + raise InvalidRequest(errors, status_code=400) + + # Step 1 + try: + api_key_token = api_key_data["token"] + api_key = get_api_key_by_secret(api_key_token) + except Exception: + current_app.logger.error( + "Revoke api key: API key not found for token {}".format(api_key_data["token"]) + if api_key_data.get("token") + else "Revoke api key: no token provided" + ) + raise InvalidRequest("Invalid request", status_code=400) + + # Step 2 + expire_api_key(api_key.service_id, api_key.id) + + current_app.logger.info("Expired api key {} for service {}".format(api_key.id, api_key.service_id)) + + # Step 3 + update_compromised_api_key_info( + api_key.service_id, + api_key.id, + { + "time_of_revocation": str(datetime.utcnow()), + "type": api_key_data["type"], + "url": api_key_data["url"], + "source": api_key_data["source"], + }, + ) + + # Step 4 + send_api_key_revocation_email(api_key.service_id, api_key.name, api_key_data) + + return jsonify(result="ok"), 201 diff --git a/app/authentication/auth.py b/app/authentication/auth.py index dbfed8ac9b..5fe609a060 100644 --- a/app/authentication/auth.py +++ b/app/authentication/auth.py @@ -1,4 +1,4 @@ -from flask import _request_ctx_stack, current_app, g, request # type: ignore +from flask import current_app, g, request # type: ignore from jwt import PyJWTError from notifications_python_client.authentication import ( decode_jwt_token, @@ -63,7 +63,7 @@ def get_auth_token(req): for el in AUTH_TYPES: scheme, auth_type, _ = el if auth_header.lower().startswith(scheme.lower()): - token = auth_header[len(scheme) + 1 :] + token = auth_header[len(scheme) + 1 :].strip() return auth_type, token raise AuthError( @@ -93,6 +93,21 @@ def requires_admin_auth(): raise AuthError("Unauthorized, admin authentication token required", 401) +def requires_sre_auth(): + request_helper.check_proxy_header_before_request() + + auth_type, auth_token = get_auth_token(request) + if auth_type != JWT_AUTH_TYPE: + raise AuthError("Invalid scheme: can only use JWT for sre authentication", 401) + client = __get_token_issuer(auth_token) + + if client == current_app.config.get("SRE_USER_NAME"): + g.service_id = current_app.config.get("SRE_USER_NAME") + return handle_admin_key(auth_token, current_app.config.get("SRE_CLIENT_SECRET")) + else: + raise AuthError("Unauthorized, sre authentication token required", 401) + + def requires_auth(): request_helper.check_proxy_header_before_request() @@ -138,11 +153,11 @@ def requires_auth(): def _auth_by_api_key(auth_token): try: - # take last 36 chars of string so that it works even if the full key is provided. 
- auth_token = auth_token[-36:] api_key = get_api_key_by_secret(auth_token) except NoResultFound: raise AuthError("Invalid token: API key not found", 403) + except ValueError: + raise AuthError("Invalid token: Enter your full API key", 403) _auth_with_api_key(api_key, api_key.service) @@ -154,9 +169,10 @@ def _auth_with_api_key(api_key, service): service_id=service.id, api_key_id=api_key.id, ) + g.service_id = api_key.service_id - _request_ctx_stack.top.authenticated_service = service - _request_ctx_stack.top.api_user = api_key + g.authenticated_service = service + g.api_user = api_key current_app.logger.info( "API authorised for service {} with api key {}, using client {}".format( service.id, api_key.id, request.headers.get("User-Agent") diff --git a/app/aws/metrics.py b/app/aws/metrics.py new file mode 100644 index 0000000000..e947862a50 --- /dev/null +++ b/app/aws/metrics.py @@ -0,0 +1,158 @@ +from __future__ import annotations # PEP 563 -- Postponed Evaluation of Annotations + +from typing import TYPE_CHECKING, Optional + +from botocore.exceptions import ClientError +from flask import current_app + +if TYPE_CHECKING: # A special Python 3 constant that is assumed to be True by 3rd party static type checkers + from app.aws.metrics_logger import MetricsLogger + from app.queue import RedisQueue + + +def put_batch_saving_metric(metrics_logger: MetricsLogger, queue: RedisQueue, count: int): + """ + Metric to calculate how many items are put in an INBOX + + Args: + queue (RedisQueue): Implementation of queue.RedisQueue for BatchSaving + count (int): count of an item added to the INBOX. + metrics (MetricsLogger): Submit metric to cloudwatch + """ + if metrics_logger.metrics_config.disable_metric_extraction: + return + try: + metrics_logger.set_namespace("NotificationCanadaCa") + metrics_logger.put_metric("batch_saving_published", count, "Count") + metrics_logger.set_dimensions({"list_name": queue._inbox}) + metrics_logger.flush() + except ClientError as e: + message = "Error sending CloudWatch Metric: {}".format(e) + current_app.logger.warning(message) + return + + +def put_batch_saving_inflight_metric(metrics_logger: MetricsLogger, queue: RedisQueue, count: int): + """ + Metric to calculate how many inflight lists have been created + + Args: + count (int): count of an inflight list created. + metrics (MetricsLogger): Submit metric to cloudwatch + """ + if metrics_logger.metrics_config.disable_metric_extraction: + return + try: + metrics_logger.set_namespace("NotificationCanadaCa") + metrics_logger.put_metric("batch_saving_inflight", count, "Count") + metrics_logger.set_dimensions({"created": "True", "notification_type": queue._suffix, "priority": queue._process_type}) + metrics_logger.flush() + except ClientError as e: + message = "Error sending CloudWatch Metric: {}".format(e) + current_app.logger.warning(message) + return + + +def put_batch_saving_inflight_processed(metrics_logger: MetricsLogger, queue: RedisQueue, count: int): + """ + Metric to calculate how many inflight lists have been processed. + + Args: + count (int): count of an inflight list created. 
+ metrics (MetricsLogger): Submit metric to cloudwatch + """ + if metrics_logger.metrics_config.disable_metric_extraction: + return + try: + metrics_logger.set_namespace("NotificationCanadaCa") + metrics_logger.put_metric("batch_saving_inflight", count, "Count") + metrics_logger.set_dimensions( + {"acknowledged": "True", "notification_type": queue._suffix, "priority": queue._process_type} + ) + metrics_logger.flush() + except ClientError as e: + message = "Error sending CloudWatch Metric: {}".format(e) + current_app.logger.warning(message) + return + + +def put_batch_saving_expiry_metric(metrics_logger: MetricsLogger, queue: RedisQueue, count: int): + """ + Metric to calculate how many inflight list have not been processed and instead + sent back to the inbox. + + Args: + count (int): Number of inflight lists sent to inbox + metrics (MetricsLogger): Submit metric to cloudwatch + """ + if metrics_logger.metrics_config.disable_metric_extraction: + return + try: + metrics_logger.set_namespace("NotificationCanadaCa") + metrics_logger.put_metric("batch_saving_inflight", count, "Count") + metrics_logger.set_dimensions({"expired": "True", "notification_type": queue._suffix, "priority": queue._process_type}) + metrics_logger.flush() + metrics_logger.put_metric("batch_saving_inflight", count, "Count") + metrics_logger.set_dimensions({"expired": "True", "notification_type": "any", "priority": "any"}) + metrics_logger.flush() + except ClientError as e: + message = "Error sending CloudWatch Metric: {}".format(e) + current_app.logger.warning(message) + return + + +def put_batch_saving_bulk_created( + metrics_logger: MetricsLogger, count: int, notification_type: Optional[str] = None, priority: Optional[str] = None +): + """ + Metric to calculate how many notifications are sent through + the bulk api + + Args: + count (int): Number of bulk job batches created + metrics (MetricsLogger): Submit metric to cloudwatch + type: priority and notification type + """ + if metrics_logger.metrics_config.disable_metric_extraction: + return + try: + metrics_logger.set_namespace("NotificationCanadaCa") + metrics_logger.put_metric("batch_saving_bulk", count, "Count") + if notification_type is None or priority is None: + current_app.logger.warning("either notification_type or priority is None") + metrics_logger.set_dimensions({"created": "True"}) + else: + metrics_logger.set_dimensions({"created": "True", "notification_type": notification_type, "priority": priority}) + metrics_logger.flush() + except ClientError as e: + message = "Error sending CloudWatch Metric: {}".format(e) + current_app.logger.warning(message) + return + + +def put_batch_saving_bulk_processed( + metrics_logger: MetricsLogger, count: int, notification_type: Optional[str] = None, priority: Optional[str] = None +): + """ + Metric to calculate how many bulk insertion have been processed. 
+ + Args: + count (int): Number of bulk job batches processed + metrics (MetricsLogger): Submit metric to cloudwatch + type: priority and notification type + """ + if metrics_logger.metrics_config.disable_metric_extraction: + return + try: + metrics_logger.set_namespace("NotificationCanadaCa") + metrics_logger.put_metric("batch_saving_bulk", count, "Count") + if notification_type is None or priority is None: + current_app.logger.warning("either notification_type or priority is None") + metrics_logger.set_dimensions({"acknowledged": "True"}) + else: + metrics_logger.set_dimensions({"acknowledged": "True", "notification_type": notification_type, "priority": priority}) + metrics_logger.flush() + except ClientError as e: + message = "Error sending CloudWatch Metric: {}".format(e) + current_app.logger.warning(message) + return diff --git a/app/aws/metrics_logger.py b/app/aws/metrics_logger.py new file mode 100644 index 0000000000..c1a3fb5da3 --- /dev/null +++ b/app/aws/metrics_logger.py @@ -0,0 +1,46 @@ +from os import environ + +from aws_embedded_metrics import MetricsLogger as _MetricsLogger # type: ignore +from aws_embedded_metrics.config import get_config # type: ignore +from aws_embedded_metrics.environment.ec2_environment import ( # type: ignore + EC2Environment, +) +from aws_embedded_metrics.environment.lambda_environment import ( # type: ignore + LambdaEnvironment, +) +from aws_embedded_metrics.environment.local_environment import ( # type: ignore + LocalEnvironment, +) + +from app.config import Config + + +class MetricsLogger(_MetricsLogger): + def __init__(self): + super().__init__(None, None) + self.metrics_config = get_config() + self.metrics_config.service_name = "BatchSaving" + self.metrics_config.service_type = "Redis" + self.metrics_config.log_group_name = "BatchSaving" + + if not Config.FF_CLOUDWATCH_METRICS_ENABLED: + self.metrics_config.disable_metric_extraction = True + + if "AWS_EXECUTION_ENV" in environ: + self.metrics_config.environment = "lambda" + else: + self.metrics_config.agent_endpoint = Config.CLOUDWATCH_AGENT_ENDPOINT + + lower_configured_enviroment = self.metrics_config.environment.lower() + if lower_configured_enviroment == "local": + self.environment = LocalEnvironment() + elif lower_configured_enviroment == "lambda": + self.environment = LambdaEnvironment() + else: + self.environment = EC2Environment() + + def flush(self) -> None: + """Override the default async MetricsLogger.flush method, flushing to stdout immediately""" + sink = self.environment.get_sink() + sink.accept(self.context) # type: ignore + self.context = self.context.create_copy_with_context() # type: ignore diff --git a/app/aws/mocks.py b/app/aws/mocks.py index e4f9d25d7a..99c7dad216 100644 --- a/app/aws/mocks.py +++ b/app/aws/mocks.py @@ -127,6 +127,27 @@ def ses_complaint_callback(): } +def ses_complaint_callback_with_subtype(subtype): + """ + https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object + """ + return { + "Signature": "bb", + "SignatureVersion": "1", + "MessageAttributes": {}, + "MessageId": "98c6e927-af5d-5f3b-9522-bab736f2cbde", + "UnsubscribeUrl": "https://sns.eu-west-1.amazonaws.com", + "TopicArn": "arn:ses_notifications", + "Type": "Notification", + "Timestamp": "2018-06-05T14:00:15.952Z", + "Subject": None, + "Message": '{"notificationType":"Complaint","complaint":{"complaintFeedbackType": "abuse", "complainedRecipients":[{"emailAddress":"recipient1@example.com"}],"timestamp":"2018-06-05T13:59:58.000Z","feedbackId":"ses_feedback_id", 
"complaintSubType":"' + + subtype + + '"},"mail":{"timestamp":"2018-06-05T14:00:15.950Z","source":"\\"Some Service\\" ","sourceArn":"arn:identity/notifications.service.gov.uk","sourceIp":"52.208.24.161","sendingAccountId":"888450439860","messageId":"ref1","destination":["recipient1@example.com"]}}', # noqa + "SigningCertUrl": "https://sns.pem", + } + + def sns_success_callback(reference=None, timestamp="2016-06-28 00:40:34.558", destination="+1XXX5550100"): # Payload details: https://docs.aws.amazon.com/sns/latest/dg/sms_stats_cloudwatch.html body = { @@ -171,6 +192,106 @@ def sns_failed_callback(provider_response, reference=None, timestamp="2016-06-28 return _sns_callback(body) +# Note that 1467074434 = 2016-06-28 00:40:34.558 UTC +def pinpoint_successful_callback(reference=None, timestamp=1467074434, destination="+1XXX5550100"): + body = { + "eventType": "TEXT_SUCCESSFUL", + "eventVersion": "1.0", + "eventTimestamp": timestamp, + "isFinal": False, + "originationPhoneNumber": "+13655550100", + "destinationPhoneNumber": destination, + "isoCountryCode": "CA", + "mcc": "302", + "mnc": "610", + "carrierName": "Bell Cellular Inc. / Aliant Telecom", + "messageId": reference, + "messageRequestTimestamp": timestamp, + "messageEncoding": "GSM", + "messageType": "TRANSACTIONAL", + "messageStatus": "SUCCESSFUL", + "messageStatusDescription": "Message has been accepted by phone carrier", + "totalMessageParts": 1, + "totalMessagePrice": 0.00581, + "totalCarrierFee": 0.00767, + } + + return _pinpoint_callback(body) + + +def pinpoint_delivered_callback(reference=None, timestamp=1467074434, destination="+1XXX5550100"): + body = { + "eventType": "TEXT_DELIVERED", + "eventVersion": "1.0", + "eventTimestamp": timestamp, + "isFinal": True, + "originationPhoneNumber": "+13655550100", + "destinationPhoneNumber": destination, + "isoCountryCode": "CA", + "mcc": "302", + "mnc": "610", + "carrierName": "Bell Cellular Inc. 
/ Aliant Telecom", + "messageId": reference, + "messageRequestTimestamp": timestamp, + "messageEncoding": "GSM", + "messageType": "TRANSACTIONAL", + "messageStatus": "DELIVERED", + "messageStatusDescription": "Message has been accepted by phone", + "totalMessageParts": 1, + "totalMessagePrice": 0.00581, + "totalCarrierFee": 0.006, + } + + return _pinpoint_callback(body) + + +def pinpoint_shortcode_delivered_callback(reference=None, timestamp=1467074434, destination="+1XXX5550100"): + body = { + "eventType": "TEXT_SUCCESSFUL", + "eventVersion": "1.0", + "eventTimestamp": timestamp, + "isFinal": True, + "originationPhoneNumber": "555555", + "destinationPhoneNumber": destination, + "isoCountryCode": "CA", + "messageId": reference, + "messageRequestTimestamp": timestamp, + "messageEncoding": "GSM", + "messageType": "TRANSACTIONAL", + "messageStatus": "SUCCESSFUL", + "messageStatusDescription": "Message has been accepted by phone carrier", + "totalMessageParts": 1, + "totalMessagePrice": 0.02183, + "totalCarrierFee": 0.005, + } + + return _pinpoint_callback(body) + + +# Note that 1467074434 = 2016-06-28 00:40:34.558 UTC +def pinpoint_failed_callback(provider_response, reference=None, timestamp=1467074434, destination="+1XXX5550100"): + body = { + "eventType": "TEXT_CARRIER_UNREACHABLE", + "eventVersion": "1.0", + "eventTimestamp": timestamp, + "isFinal": True, + "originationPhoneNumber": "+13655550100", + "destinationPhoneNumber": destination, + "isoCountryCode": "CA", + "messageId": reference, + "messageRequestTimestamp": timestamp, + "messageEncoding": "GSM", + "messageType": "TRANSACTIONAL", + "messageStatus": "CARRIER_UNREACHABLE", + "messageStatusDescription": provider_response, + "totalMessageParts": 1, + "totalMessagePrice": 0.00581, + "totalCarrierFee": 0.006, + } + + return _pinpoint_callback(body) + + def _ses_bounce_callback(reference, bounce_type, bounce_subtype=None): ses_message_body = { "bounce": { @@ -246,3 +367,19 @@ def _sns_callback(body): "UnsubscribeUrl": "https://sns.ca-central-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=[REACTED]", "MessageAttributes": {}, } + + +def _pinpoint_callback(body): + return { + "Type": "Notification", + "MessageId": "8e83c020-1234-1234-1234-92a8ee9baa0a", + "TopicArn": "arn:aws:sns:ca-central-1:12341234:ses_notifications", + "Subject": None, + "Message": json.dumps(body), + "Timestamp": "2017-11-17T12:14:03.710Z", + "SignatureVersion": "1", + "Signature": "[REDACTED]", + "SigningCertUrl": "https://sns.ca-central-1.amazonaws.com/SimpleNotificationService-[REDACTED].pem", + "UnsubscribeUrl": "https://sns.ca-central-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=[REACTED]", + "MessageAttributes": {}, + } diff --git a/app/aws/s3.py b/app/aws/s3.py index 524eb876a9..9ebeb6d137 100644 --- a/app/aws/s3.py +++ b/app/aws/s3.py @@ -1,5 +1,6 @@ import uuid from datetime import datetime, timedelta +from typing import List import botocore import pytz @@ -7,6 +8,8 @@ from flask import current_app from notifications_utils.s3 import s3upload as utils_s3upload +from app.models import Job + FILE_LOCATION_STRUCTURE = "service-{}-notify/{}.csv" @@ -60,8 +63,20 @@ def get_job_metadata_from_s3(service_id, job_id): return obj.get()["Metadata"] -def remove_job_from_s3(service_id, job_id): - return remove_s3_object(*get_job_location(service_id, job_id)) +def remove_jobs_from_s3(jobs: List[Job], batch_size=1000): + """ + Remove the files from S3 for the given jobs. + + Args: + jobs (List[Job]): The jobs whose files need to be removed from S3. 
+ batch_size (int, optional): The number of jobs to process in each boto call. Defaults to the AWS maximum of 1000. + """ + + bucket = resource("s3").Bucket(current_app.config["CSV_UPLOAD_BUCKET_NAME"]) + + for start in range(0, len(jobs), batch_size): + object_keys = [FILE_LOCATION_STRUCTURE.format(job.service_id, job.id) for job in jobs[start : start + batch_size]] + bucket.delete_objects(Delete={"Objects": [{"Key": key} for key in object_keys]}) def get_s3_bucket_objects(bucket_name, subfolder="", older_than=7, limit_days=2): diff --git a/app/billing/rest.py b/app/billing/rest.py index 4b024cc9d1..c2a7d34e8f 100644 --- a/app/billing/rest.py +++ b/app/billing/rest.py @@ -52,7 +52,6 @@ def get_yearly_billing_usage_summary_from_ft_billing(service_id): @billing_blueprint.route("/free-sms-fragment-limit", methods=["GET"]) def get_free_sms_fragment_limit(service_id): - financial_year_start = request.args.get("financial_year_start") annual_billing = dao_get_free_sms_fragment_limit_for_year(service_id, financial_year_start) @@ -90,7 +89,6 @@ def get_free_sms_fragment_limit(service_id): @billing_blueprint.route("/free-sms-fragment-limit", methods=["POST"]) def create_or_update_free_sms_fragment_limit(service_id): - req_args = request.get_json() form = validate(req_args, create_or_update_free_sms_fragment_limit_schema) diff --git a/app/celery/celery.py b/app/celery/celery.py index b1758f73c0..be5a5593aa 100644 --- a/app/celery/celery.py +++ b/app/celery/celery.py @@ -48,6 +48,7 @@ def init_app(self, app): "beat_schedule": app.config["CELERYBEAT_SCHEDULE"], "imports": app.config["CELERY_IMPORTS"], "task_serializer": app.config["CELERY_TASK_SERIALIZER"], + "enable_utc": app.config["CELERY_ENABLE_UTC"], "timezone": app.config["CELERY_TIMEZONE"], "broker_transport_options": app.config["BROKER_TRANSPORT_OPTIONS"], "task_queues": app.config["CELERY_QUEUES"], diff --git a/app/celery/letters_pdf_tasks.py b/app/celery/letters_pdf_tasks.py index 059f0fc940..e40a10b366 100644 --- a/app/celery/letters_pdf_tasks.py +++ b/app/celery/letters_pdf_tasks.py @@ -1,392 +1,51 @@ -import base64 -import math -from base64 import urlsafe_b64encode -from datetime import datetime -from hashlib import sha512 -from json import JSONDecodeError -from uuid import UUID - -from botocore.exceptions import ClientError as BotoClientError -from flask import current_app -from notifications_utils.s3 import s3upload from notifications_utils.statsd_decorators import statsd -from PyPDF2.utils import PdfReadError -from requests import RequestException -from requests import post as requests_post from app import notify_celery -from app.aws import s3 -from app.config import QueueNames, TaskNames from app.cronitor import cronitor -from app.dao.notifications_dao import ( - dao_get_notification_by_reference, - dao_get_notifications_by_references, - dao_update_notification, - dao_update_notifications_by_reference, - get_notification_by_id, - update_notification_status_by_id, -) -from app.errors import VirusScanError -from app.letters.utils import ( - ScanErrorType, - copy_redaction_failed_pdf, - get_file_names_from_error_bucket, - get_folder_name, - get_page_count, - get_reference_from_filename, - move_error_pdf_to_scan_bucket, - move_failed_pdf, - move_scan_to_invalid_pdf_bucket, - upload_letter_pdf, -) -from app.models import ( - KEY_TYPE_TEST, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_VALIDATION_FAILED, - NOTIFICATION_VIRUS_SCAN_FAILED, -) -from celery.exceptions import 
MaxRetriesExceededError @notify_celery.task(bind=True, name="create-letters-pdf", max_retries=15, default_retry_delay=300) @statsd(namespace="tasks") def create_letters_pdf(self, notification_id): - try: - notification = get_notification_by_id(notification_id, _raise=True) - pdf_data, billable_units = get_letters_pdf( - notification.template, - contact_block=notification.reply_to_text, - filename=notification.service.letter_branding and notification.service.letter_branding.filename, - values=notification.personalisation, - ) - - upload_letter_pdf(notification, pdf_data) - - if notification.key_type != KEY_TYPE_TEST: - notification.billable_units = billable_units - dao_update_notification(notification) - - current_app.logger.info( - "Letter notification reference {reference}: billable units set to {billable_units}".format( - reference=str(notification.reference), billable_units=billable_units - ) - ) - - except (RequestException, BotoClientError): - try: - current_app.logger.exception("Letters PDF notification creation for id: {} failed".format(notification_id)) - self.retry(queue=QueueNames.RETRY) - except MaxRetriesExceededError: - current_app.logger.error( - "RETRY FAILED: task create_letters_pdf failed for notification {}".format(notification_id), - ) - update_notification_status_by_id(notification_id, "technical-failure") + pass def get_letters_pdf(template, contact_block, filename, values): - template_for_letter_print = { - "subject": template.subject, - "content": template.content, - } - - data = { - "letter_contact_block": contact_block, - "template": template_for_letter_print, - "values": values, - "filename": filename, - } - resp = requests_post( - "{}/print.pdf".format(current_app.config["TEMPLATE_PREVIEW_API_HOST"]), - json=data, - headers={"Authorization": "Token {}".format(current_app.config["TEMPLATE_PREVIEW_API_KEY"])}, - ) - resp.raise_for_status() - - pages_per_sheet = 2 - billable_units = math.ceil(int(resp.headers.get("X-pdf-page-count", 0)) / pages_per_sheet) - - return resp.content, billable_units + pass @notify_celery.task(name="collate-letter-pdfs-for-day") @cronitor("collate-letter-pdfs-for-day") def collate_letter_pdfs_for_day(date=None): - if not date: - # Using the truncated date is ok because UTC to BST does not make a difference to the date, - # since it is triggered mid afternoon. - date = datetime.utcnow().strftime("%Y-%m-%d") - - letter_pdfs = sorted( - s3.get_s3_bucket_objects(current_app.config["LETTERS_PDF_BUCKET_NAME"], subfolder=date), - key=lambda letter: letter["Key"], - ) - for i, letters in enumerate(group_letters(letter_pdfs)): - filenames = [letter["Key"] for letter in letters] - - hash = urlsafe_b64encode(sha512("".join(filenames).encode()).digest())[:20].decode() - # eg NOTIFY.2018-12-31.001.Wjrui5nAvObjPd-3GEL-.ZIP - dvla_filename = "NOTIFY.{date}.{num:03}.{hash}.ZIP".format(date=date, num=i + 1, hash=hash) - - current_app.logger.info( - "Calling task zip-and-send-letter-pdfs for {} pdfs to upload {} with total size {:,} bytes".format( - len(filenames), dvla_filename, sum(letter["Size"] for letter in letters) - ) - ) - notify_celery.send_task( - name=TaskNames.ZIP_AND_SEND_LETTER_PDFS, - kwargs={"filenames_to_zip": filenames, "upload_filename": dvla_filename}, - queue=QueueNames.PROCESS_FTP, - compression="zlib", - ) + pass def group_letters(letter_pdfs): - """ - Group letters in chunks of MAX_LETTER_PDF_ZIP_FILESIZE. Will add files to lists, never going over that size. 
- If a single file is (somehow) larger than MAX_LETTER_PDF_ZIP_FILESIZE that'll be in a list on it's own. - If there are no files, will just exit (rather than yielding an empty list). - """ - running_filesize = 0 - list_of_files = [] - for letter in letter_pdfs: - if letter["Key"].lower().endswith(".pdf") and letter_in_created_state(letter["Key"]): - if ( - running_filesize + letter["Size"] > current_app.config["MAX_LETTER_PDF_ZIP_FILESIZE"] - or len(list_of_files) >= current_app.config["MAX_LETTER_PDF_COUNT_PER_ZIP"] - ): - yield list_of_files - running_filesize = 0 - list_of_files = [] - - running_filesize += letter["Size"] - list_of_files.append(letter) - - if list_of_files: - yield list_of_files + pass def letter_in_created_state(filename): - # filename looks like '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF' - subfolder = filename.split("/")[0] - ref = get_reference_from_filename(filename) - notifications = dao_get_notifications_by_references([ref]) - if notifications: - if notifications[0].status == NOTIFICATION_CREATED: - return True - current_app.logger.info( - "Collating letters for {} but notification with reference {} already in {}".format( - subfolder, ref, notifications[0].status - ) - ) - return False + pass @notify_celery.task(bind=True, name="process-virus-scan-passed", max_retries=15, default_retry_delay=300) def process_virus_scan_passed(self, filename): - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - current_app.logger.info("notification id {} Virus scan passed: {}".format(notification.id, filename)) - - is_test_key = notification.key_type == KEY_TYPE_TEST - - scan_pdf_object = s3.get_s3_object(current_app.config["LETTERS_SCAN_BUCKET_NAME"], filename) - old_pdf = scan_pdf_object.get()["Body"].read() - - try: - billable_units = get_page_count(old_pdf) - except PdfReadError: - current_app.logger.exception(msg="Invalid PDF received for notification_id: {}".format(notification.id)) - _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object) - return - - sanitise_response = _sanitise_precompiled_pdf(self, notification, old_pdf) - if not sanitise_response: - new_pdf = None - else: - sanitise_response = sanitise_response.json() - try: - new_pdf = base64.b64decode(sanitise_response["file"].encode()) - except JSONDecodeError: - new_pdf = sanitise_response.content - - redaction_failed_message = sanitise_response.get("redaction_failed_message") - if redaction_failed_message and not is_test_key: - current_app.logger.info("{} for notification id {} ({})".format(redaction_failed_message, notification.id, filename)) - copy_redaction_failed_pdf(filename) - - # TODO: Remove this once CYSP update their template to not cross over the margins - if notification.service_id == UUID("fe44178f-3b45-4625-9f85-2264a36dd9ec"): # CYSP - # Check your state pension submit letters with good addresses and notify tags, so just use their supplied pdf - new_pdf = old_pdf - - if not new_pdf: - current_app.logger.info("Invalid precompiled pdf received {} ({})".format(notification.id, filename)) - _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object) - return - else: - current_app.logger.info("Validation was successful for precompiled pdf {} ({})".format(notification.id, filename)) - - current_app.logger.info("notification id {} ({}) sanitised and ready to send".format(notification.id, filename)) - - try: - _upload_pdf_to_test_or_live_pdf_bucket(new_pdf, filename, 
is_test_letter=is_test_key) - - update_letter_pdf_status( - reference=reference, - status=NOTIFICATION_DELIVERED if is_test_key else NOTIFICATION_CREATED, - billable_units=billable_units, - ) - scan_pdf_object.delete() - except BotoClientError: - current_app.logger.exception("Error uploading letter to live pdf bucket for notification: {}".format(notification.id)) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - - -def _move_invalid_letter_and_update_status(notification, filename, scan_pdf_object): - try: - move_scan_to_invalid_pdf_bucket(filename) - scan_pdf_object.delete() - - update_letter_pdf_status( - reference=notification.reference, - status=NOTIFICATION_VALIDATION_FAILED, - billable_units=0, - ) - except BotoClientError: - current_app.logger.exception("Error when moving letter with id {} to invalid PDF bucket".format(notification.id)) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - - -def _upload_pdf_to_test_or_live_pdf_bucket(pdf_data, filename, is_test_letter): - target_bucket_config = "TEST_LETTERS_BUCKET_NAME" if is_test_letter else "LETTERS_PDF_BUCKET_NAME" - target_bucket_name = current_app.config[target_bucket_config] - target_filename = get_folder_name(datetime.utcnow(), is_test_letter) + filename - - s3upload( - filedata=pdf_data, - region=current_app.config["AWS_REGION"], - bucket_name=target_bucket_name, - file_location=target_filename, - ) - - -def _sanitise_precompiled_pdf(self, notification, precompiled_pdf): - try: - response = requests_post( - "{}/precompiled/sanitise".format(current_app.config["TEMPLATE_PREVIEW_API_HOST"]), - data=precompiled_pdf, - headers={ - "Authorization": "Token {}".format(current_app.config["TEMPLATE_PREVIEW_API_KEY"]), - "Service-ID": str(notification.service_id), - "Notification-ID": str(notification.id), - }, - ) - response.raise_for_status() - return response - except RequestException as ex: - if ex.response is not None and ex.response.status_code == 400: - message = "sanitise_precompiled_pdf validation error for notification: {}. ".format(notification.id) - if "message" in response.json(): - message += response.json()["message"] - - current_app.logger.info(message) - return None - - try: - current_app.logger.exception("sanitise_precompiled_pdf failed for notification: {}".format(notification.id)) - self.retry(queue=QueueNames.RETRY) - except MaxRetriesExceededError: - current_app.logger.error( - "RETRY FAILED: sanitise_precompiled_pdf failed for notification {}".format(notification.id), - ) - - notification.status = NOTIFICATION_TECHNICAL_FAILURE - dao_update_notification(notification) - raise + pass @notify_celery.task(name="process-virus-scan-failed") def process_virus_scan_failed(filename): - move_failed_pdf(filename, ScanErrorType.FAILURE) - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - updated_count = update_letter_pdf_status(reference, NOTIFICATION_VIRUS_SCAN_FAILED, billable_units=0) - - if updated_count != 1: - raise Exception( - "There should only be one letter notification for each reference. 
Found {} notifications".format(updated_count) - ) - - error = VirusScanError("notification id {} Virus scan failed: {}".format(notification.id, filename)) - current_app.logger.exception(error) - raise error + pass @notify_celery.task(name="process-virus-scan-error") def process_virus_scan_error(filename): - move_failed_pdf(filename, ScanErrorType.ERROR) - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - updated_count = update_letter_pdf_status(reference, NOTIFICATION_TECHNICAL_FAILURE, billable_units=0) - - if updated_count != 1: - raise Exception( - "There should only be one letter notification for each reference. Found {} notifications".format(updated_count) - ) - error = VirusScanError("notification id {} Virus scan error: {}".format(notification.id, filename)) - current_app.logger.exception(error) - raise error + pass def update_letter_pdf_status(reference, status, billable_units): - return dao_update_notifications_by_reference( - references=[reference], - update_dict={ - "status": status, - "billable_units": billable_units, - "updated_at": datetime.utcnow(), - }, - )[0] + pass def replay_letters_in_error(filename=None): - # This method can be used to replay letters that end up in the ERROR directory. - # We had an incident where clamAV was not processing the virus scan. - if filename: - move_error_pdf_to_scan_bucket(filename) - # call task to add the filename to anti virus queue - current_app.logger.info("Calling scan_file for: {}".format(filename)) - - if current_app.config["ANTIVIRUS_ENABLED"]: - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={"filename": filename}, - queue=QueueNames.ANTIVIRUS, - ) - else: - # stub out antivirus in dev - process_virus_scan_passed.apply_async( - kwargs={"filename": filename}, - queue=QueueNames.LETTERS, - ) - else: - error_files = get_file_names_from_error_bucket() - for item in error_files: - moved_file_name = item.key.split("/")[1] - current_app.logger.info("Calling scan_file for: {}".format(moved_file_name)) - move_error_pdf_to_scan_bucket(moved_file_name) - # call task to add the filename to anti virus queue - if current_app.config["ANTIVIRUS_ENABLED"]: - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={"filename": moved_file_name}, - queue=QueueNames.ANTIVIRUS, - ) - else: - # stub out antivirus in dev - process_virus_scan_passed.apply_async( - kwargs={"filename": moved_file_name}, - queue=QueueNames.LETTERS, - ) + pass diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index ea31db13fa..4c3e5832d1 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -1,4 +1,5 @@ from datetime import datetime, timedelta +from typing import List import pytz from flask import current_app @@ -12,7 +13,7 @@ from app.config import QueueNames from app.cronitor import cronitor from app.dao.inbound_sms_dao import delete_inbound_sms_older_than_retention -from app.dao.jobs_dao import dao_archive_job, dao_get_jobs_older_than_data_retention +from app.dao.jobs_dao import dao_archive_jobs, dao_get_jobs_older_than_data_retention from app.dao.notifications_dao import ( dao_timeout_notifications, delete_notifications_older_than_retention_by_type, @@ -37,23 +38,37 @@ @notify_celery.task(name="remove_sms_email_jobs") @cronitor("remove_sms_email_jobs") @statsd(namespace="tasks") -def remove_sms_email_csv_files(): - _remove_csv_files([EMAIL_TYPE, SMS_TYPE]) +def remove_sms_email_jobs(): + """ + Remove csv files from s3 and archive email and sms 
jobs older than data retention period. + """ + + _archive_jobs([EMAIL_TYPE, SMS_TYPE]) @notify_celery.task(name="remove_letter_jobs") @cronitor("remove_letter_jobs") @statsd(namespace="tasks") -def remove_letter_csv_files(): - _remove_csv_files([LETTER_TYPE]) +def remove_letter_jobs(): + _archive_jobs([LETTER_TYPE]) -def _remove_csv_files(job_types): - jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types) - for job in jobs: - s3.remove_job_from_s3(job.service_id, job.id) - dao_archive_job(job) - current_app.logger.info("Job ID {} has been removed from s3.".format(job.id)) +def _archive_jobs(job_types: List[str]): + """ + Remove csv files from s3 and archive jobs older than data retention period. + + Args: + job_types (List[str]): list of job types to remove csv files and archive jobs for + """ + + while True: + jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=100) + if len(jobs) == 0: + break + current_app.logger.info("Archiving {} jobs.".format(len(jobs))) + s3.remove_jobs_from_s3(jobs) + dao_archive_jobs(jobs) + current_app.logger.info(f"Jobs archived: {[job.id for job in jobs]}") @notify_celery.task(name="delete-sms-notifications") @@ -119,9 +134,9 @@ def timeout_notifications(): # queue callback task only if the service_callback_api exists service_callback_api = get_service_delivery_status_callback_api_for_service(service_id=notification.service_id) if service_callback_api: - encrypted_notification = create_delivery_status_callback_data(notification, service_callback_api) + signed_notification = create_delivery_status_callback_data(notification, service_callback_api) send_delivery_status_to_service.apply_async( - [str(notification.id), encrypted_notification], + [str(notification.id), signed_notification], queue=QueueNames.CALLBACKS, ) @@ -148,7 +163,6 @@ def send_daily_performance_platform_stats(date=None): date = datetime.strptime(date, "%Y-%m-%d").date() if performance_platform_client.active: - send_total_sent_notifications_to_performance_platform(bst_date=date) processing_time.send_processing_time_to_performance_platform(bst_date=date) diff --git a/app/celery/process_pinpoint_receipts_tasks.py b/app/celery/process_pinpoint_receipts_tasks.py new file mode 100644 index 0000000000..d5bbb4d1bc --- /dev/null +++ b/app/celery/process_pinpoint_receipts_tasks.py @@ -0,0 +1,154 @@ +from datetime import datetime +from typing import Union + +from flask import current_app, json +from notifications_utils.statsd_decorators import statsd +from sqlalchemy.orm.exc import NoResultFound + +from app import notify_celery, statsd_client +from app.config import QueueNames +from app.dao import notifications_dao +from app.models import ( + NOTIFICATION_DELIVERED, + NOTIFICATION_PERMANENT_FAILURE, + NOTIFICATION_SENT, + NOTIFICATION_TECHNICAL_FAILURE, + NOTIFICATION_TEMPORARY_FAILURE, + PINPOINT_PROVIDER, +) +from app.notifications.callbacks import _check_and_queue_callback_task +from celery.exceptions import Retry + +# Pinpoint receipts are of the form: +# { +# "eventType": "TEXT_DELIVERED", +# "eventVersion": "1.0", +# "eventTimestamp": 1712944268877, +# "isFinal": true, +# "originationPhoneNumber": "+13655550100", +# "destinationPhoneNumber": "+16135550123", +# "isoCountryCode": "CA", +# "mcc": "302", +# "mnc": "610", +# "carrierName": "Bell Cellular Inc. 
/ Aliant Telecom", +# "messageId": "221bc70c-7ee6-4987-b1ba-9684ba25be20", +# "messageRequestTimestamp": 1712944267685, +# "messageEncoding": "GSM", +# "messageType": "TRANSACTIONAL", +# "messageStatus": "DELIVERED", +# "messageStatusDescription": "Message has been accepted by phone", +# "totalMessageParts": 1, +# "totalMessagePrice": 0.00581, +# "totalCarrierFee": 0.006 +# } + + +@notify_celery.task(bind=True, name="process-pinpoint-result", max_retries=5, default_retry_delay=300) +@statsd(namespace="tasks") +def process_pinpoint_results(self, response): + try: + receipt = json.loads(response["Message"]) + reference = receipt["messageId"] + status = receipt["messageStatus"] + provider_response = receipt["messageStatusDescription"] + isFinal = receipt["isFinal"] + + notification_status = determine_pinpoint_status(status, provider_response, isFinal) + + if notification_status == NOTIFICATION_SENT: + return # we don't want to update the status to sent if it's already sent + + if not notification_status: + current_app.logger.warning(f"unhandled provider response for reference {reference}, received '{provider_response}'") + notification_status = NOTIFICATION_TECHNICAL_FAILURE # revert to tech failure by default + + try: + notification = notifications_dao.dao_get_notification_by_reference(reference) + except NoResultFound: + try: + current_app.logger.warning( + f"RETRY {self.request.retries}: notification not found for Pinpoint reference {reference} (update to {notification_status}). " + f"Callback may have arrived before notification was persisted to the DB. Adding task to retry queue" + ) + self.retry(queue=QueueNames.RETRY) + except self.MaxRetriesExceededError: + current_app.logger.warning( + f"notification not found for Pinpoint reference: {reference} (update to {notification_status}). Giving up." + ) + return + if notification.sent_by != PINPOINT_PROVIDER: + current_app.logger.exception(f"Pinpoint callback handled notification {notification.id} not sent by Pinpoint") + return + + if notification.status != NOTIFICATION_SENT: + notifications_dao._duplicate_update_warning(notification, notification_status) + return + + notifications_dao._update_notification_status( + notification=notification, + status=notification_status, + provider_response=provider_response, + ) + + if notification_status != NOTIFICATION_DELIVERED: + current_app.logger.info( + ( + f"Pinpoint delivery failed: notification id {notification.id} and reference {reference} has error found. " + f"Provider response: {provider_response}" + ) + ) + else: + current_app.logger.info( + f"Pinpoint callback return status of {notification_status} for notification: {notification.id}" + ) + + statsd_client.incr(f"callback.pinpoint.{notification_status}") + + if notification.sent_at: + statsd_client.timing_with_dates("callback.pinpoint.elapsed-time", datetime.utcnow(), notification.sent_at) + + _check_and_queue_callback_task(notification) + + except Retry: + raise + + except Exception as e: + current_app.logger.exception(f"Error processing Pinpoint results: {str(e)}") + self.retry(queue=QueueNames.RETRY) + + +def determine_pinpoint_status(status: str, provider_response: str, isFinal: bool) -> Union[str, None]: + """Determine the notification status based on the SMS status and provider response. 
+ + Args: + status (str): message status from AWS + provider_response (str): detailed status from the SMS provider + isFinal (bool): whether this is the last update for this send + + Returns: + Union[str, None]: the notification status or None if the status is not handled + """ + + if status == "DELIVERED" or status == "SUCCESSFUL" and isFinal: + return NOTIFICATION_DELIVERED + elif status == "SUCCESSFUL": # carrier has accepted the message but it hasn't gone to the phone yet + return NOTIFICATION_SENT + + response_lower = provider_response.lower() + + if "blocked" in response_lower: + return NOTIFICATION_TECHNICAL_FAILURE + elif "invalid" in response_lower: + return NOTIFICATION_TECHNICAL_FAILURE + elif "is opted out" in response_lower: + return NOTIFICATION_PERMANENT_FAILURE + elif "unknown error" in response_lower: + return NOTIFICATION_TECHNICAL_FAILURE + elif "exceed max price" in response_lower: + return NOTIFICATION_TECHNICAL_FAILURE + elif "phone carrier is currently unreachable/unavailable" in response_lower: + return NOTIFICATION_TEMPORARY_FAILURE + elif "phone is currently unreachable/unavailable" in response_lower: + return NOTIFICATION_PERMANENT_FAILURE + else: + return None diff --git a/app/celery/process_ses_receipts_tasks.py b/app/celery/process_ses_receipts_tasks.py index fb8db1c7f3..640f918bf2 100644 --- a/app/celery/process_ses_receipts_tasks.py +++ b/app/celery/process_ses_receipts_tasks.py @@ -1,13 +1,13 @@ -from datetime import datetime, timedelta +from datetime import datetime -import iso8601 from flask import current_app, json from notifications_utils.statsd_decorators import statsd from sqlalchemy.orm.exc import NoResultFound -from app import notify_celery, statsd_client +from app import bounce_rate_client, notify_celery, statsd_client from app.config import QueueNames from app.dao import notifications_dao +from app.models import NOTIFICATION_DELIVERED, NOTIFICATION_PERMANENT_FAILURE from app.notifications.callbacks import _check_and_queue_callback_task from app.notifications.notifications_ses_callback import ( _check_and_queue_complaint_callback_task, @@ -29,7 +29,12 @@ default_retry_delay=300, ) @statsd(namespace="tasks") -def process_ses_results(self, response): +def process_ses_results(self, response): # noqa: C901 + # initialize these to None so error handling is simpler + notification = None + reference = None + notification_status = None + try: ses_message = json.loads(response["Message"]) notification_type = ses_message["notificationType"] @@ -38,28 +43,45 @@ def process_ses_results(self, response): _check_and_queue_complaint_callback_task(*handle_complaint(ses_message)) return True - aws_response_dict = get_aws_responses(ses_message) - - notification_status = aws_response_dict["notification_status"] reference = ses_message["mail"]["messageId"] - try: notification = notifications_dao.dao_get_notification_by_reference(reference) except NoResultFound: - message_time = iso8601.parse_date(ses_message["mail"]["timestamp"]).replace(tzinfo=None) - if datetime.utcnow() - message_time < timedelta(minutes=5): + try: + current_app.logger.warning( + f"RETRY {self.request.retries}: notification not found for SES reference {reference}. " + f"Callback may have arrived before notification was persisted to the DB. Adding task to retry queue" + ) + self.retry(queue=QueueNames.RETRY) + except self.MaxRetriesExceededError: + current_app.logger.warning(f"notification not found for SES reference: {reference}. 
Giving up.") + return + except Exception as e: + try: + current_app.logger.warning( + f"RETRY {self.request.retries}: notification not found for SES reference {reference}. " + f"There was an Error: {e}. Adding task to retry queue" + ) self.retry(queue=QueueNames.RETRY) - else: + except self.MaxRetriesExceededError: current_app.logger.warning( - "notification not found for reference: {} (update to {})".format(reference, notification_status) + f"notification not found for SES reference: {reference}. Error has persisted > number of retries. Giving up." ) return - notifications_dao._update_notification_status( - notification=notification, - status=notification_status, - provider_response=aws_response_dict["provider_response"], - ) + aws_response_dict = get_aws_responses(ses_message) + notification_status = aws_response_dict["notification_status"] + # Sometimes we get callback from the providers in the wrong order. If the notification has a + # permanent failure status, we don't want to overwrite it with a delivered status. + if notification.status == NOTIFICATION_PERMANENT_FAILURE and notification_status == NOTIFICATION_DELIVERED: + pass + else: + notifications_dao._update_notification_status( + notification=notification, + status=notification_status, + provider_response=aws_response_dict.get("provider_response", None), + bounce_response=aws_response_dict.get("bounce_response", None), + ) if not aws_response_dict["success"]: current_app.logger.info( @@ -74,6 +96,12 @@ def process_ses_results(self, response): statsd_client.incr("callback.ses.{}".format(notification_status)) + if notification_status == NOTIFICATION_PERMANENT_FAILURE: + bounce_rate_client.set_sliding_hard_bounce(notification.service_id, str(notification.id)) + current_app.logger.info( + f"Setting total hard bounce notifications for service {notification.service.id} with notification {notification.id} in REDIS" + ) + if notification.sent_at: statsd_client.timing_with_dates("callback.ses.elapsed-time", datetime.utcnow(), notification.sent_at) @@ -85,5 +113,13 @@ def process_ses_results(self, response): raise except Exception as e: - current_app.logger.exception("Error processing SES results: {}".format(type(e))) + notifcation_msg = "Notification ID: {}".format(notification.id) if notification else "No notification" + notification_status_msg = ( + "Notification status: {}".format(notification_status) if notification_status else "No notification status" + ) + ref_msg = "Reference ID: {}".format(reference) if reference else "No reference" + + current_app.logger.exception( + "Error processing SES results: {} [{}, {}, {}]".format(type(e), notifcation_msg, notification_status_msg, ref_msg) + ) self.retry(queue=QueueNames.RETRY) diff --git a/app/celery/process_sns_receipts_tasks.py b/app/celery/process_sns_receipts_tasks.py index 08676a4f0f..b84f626367 100644 --- a/app/celery/process_sns_receipts_tasks.py +++ b/app/celery/process_sns_receipts_tasks.py @@ -1,6 +1,5 @@ -from datetime import datetime, timedelta +from datetime import datetime -import iso8601 from flask import current_app, json from notifications_utils.statsd_decorators import statsd from sqlalchemy.orm.exc import NoResultFound @@ -30,23 +29,25 @@ def process_sns_results(self, response): sns_status = sns_message["status"] provider_response = sns_message["delivery"]["providerResponse"] - try: - notification_status = determine_status(sns_status, provider_response) - except KeyError: + notification_status = determine_status(sns_status, provider_response) + if not 
notification_status: current_app.logger.warning(f"unhandled provider response for reference {reference}, received '{provider_response}'") - notification_status = NOTIFICATION_TECHNICAL_FAILURE - provider_response = None + notification_status = NOTIFICATION_TECHNICAL_FAILURE # revert to tech failure by default try: notification = notifications_dao.dao_get_notification_by_reference(reference) except NoResultFound: - message_time = iso8601.parse_date(sns_message["notification"]["timestamp"]).replace(tzinfo=None) - if datetime.utcnow() - message_time < timedelta(minutes=5): + try: + current_app.logger.warning( + f"RETRY {self.request.retries}: notification not found for SNS reference {reference} (update to {notification_status}). " + f"Callback may have arrived before notification was persisted to the DB. Adding task to retry queue" + ) self.retry(queue=QueueNames.RETRY) - else: - current_app.logger.warning(f"notification not found for reference: {reference} (update to {notification_status})") + except self.MaxRetriesExceededError: + current_app.logger.warning( + f"notification not found for SNS reference: {reference} (update to {notification_status}). Giving up." + ) return - if notification.sent_by != SNS_PROVIDER: current_app.logger.exception(f"SNS callback handled notification {notification.id} not sent by SNS") return @@ -58,7 +59,7 @@ def process_sns_results(self, response): notifications_dao._update_notification_status( notification=notification, status=notification_status, - provider_response=provider_response if notification_status == NOTIFICATION_TECHNICAL_FAILURE else None, + provider_response=provider_response, ) if notification_status != NOTIFICATION_DELIVERED: @@ -104,9 +105,15 @@ def determine_status(sns_status, provider_response): "Phone has blocked SMS": NOTIFICATION_TECHNICAL_FAILURE, "Phone is on a blocked list": NOTIFICATION_TECHNICAL_FAILURE, "Phone is currently unreachable/unavailable": NOTIFICATION_PERMANENT_FAILURE, - "Phone number is opted out": NOTIFICATION_TECHNICAL_FAILURE, + "Phone number is opted out": NOTIFICATION_PERMANENT_FAILURE, "This delivery would exceed max price": NOTIFICATION_TECHNICAL_FAILURE, "Unknown error attempting to reach phone": NOTIFICATION_TECHNICAL_FAILURE, } - return reasons[provider_response] + status = reasons.get(provider_response) # could be None + if not status: + # TODO: Pattern matching in Python 3.10 should simplify this overall function logic. 
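# Minimal, non-authoritative sketch of what the TODO above points at: on
# Python 3.10 the exact-match `reasons` dict plus this substring fallback could
# collapse into one match statement with guards. The constants are the ones
# this module already imports from app.models; the function name and the
# subset of responses shown are illustrative only.
from app.models import (
    NOTIFICATION_PERMANENT_FAILURE,
    NOTIFICATION_TECHNICAL_FAILURE,
)


def determine_status_sketch(provider_response: str):
    match provider_response.lower():
        case r if "is opted out" in r:
            return NOTIFICATION_PERMANENT_FAILURE
        case r if "exceed max price" in r or "unknown error" in r:
            return NOTIFICATION_TECHNICAL_FAILURE
        case _:
            return None  # unhandled; the caller falls back to technical failure by default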
+ if "is opted out" in provider_response: + return NOTIFICATION_PERMANENT_FAILURE + + return status diff --git a/app/celery/provider_tasks.py b/app/celery/provider_tasks.py index 73c843ffc1..4f21f9f2d9 100644 --- a/app/celery/provider_tasks.py +++ b/app/celery/provider_tasks.py @@ -1,20 +1,25 @@ +from typing import Optional + from flask import current_app from notifications_utils.recipients import InvalidEmailError from notifications_utils.statsd_decorators import statsd from sqlalchemy.orm.exc import NoResultFound from app import notify_celery -from app.config import QueueNames +from app.celery.utils import CeleryParams +from app.config import Config from app.dao import notifications_dao from app.dao.notifications_dao import update_notification_status_by_id from app.delivery import send_to_providers from app.exceptions import ( InvalidUrlException, - MalwarePendingException, + MalwareDetectedException, + MalwareScanInProgressException, NotificationTechnicalFailureException, ) -from app.models import NOTIFICATION_TECHNICAL_FAILURE +from app.models import NOTIFICATION_TECHNICAL_FAILURE, Notification from app.notifications.callbacks import _check_and_queue_callback_task +from celery import Task # Celery rate limits are per worker instance and not a global rate limit. @@ -37,55 +42,60 @@ def deliver_throttled_sms(self, notification_id): # Celery rate limits are per worker instance and not a global rate limit. # https://docs.celeryproject.org/en/stable/userguide/tasks.html#Task.rate_limit -# This task is dispatched through the `send-sms-tasks` queue. -# This queue is consumed by 6 Celery instances with 4 workers in production. -# The maximum throughput is therefore 6 instances * 4 workers = 24 tasks per second -# if we set rate_limit="1/s" on the Celery task +# We currently set rate_limit="1/s" on the Celery task and 4 workers per pod, and so a limit of 4 tasks per second per pod. +# The number of pods is controlled by the Kubernetes HPA and scales up and down with demand. +# Currently in production we have 3 celery-sms-send-primary pods, and up to 20 celery-sms-send-scalable pods +# This means we can send up to 92 messages per second. 
@notify_celery.task( bind=True, name="deliver_sms", max_retries=48, default_retry_delay=300, - rate_limit="1/s", + rate_limit=Config.CELERY_DELIVER_SMS_RATE_LIMIT, ) @statsd(namespace="tasks") def deliver_sms(self, notification_id): _deliver_sms(self, notification_id) +SCAN_RETRY_BACKOFF = 10 +SCAN_MAX_BACKOFF_RETRIES = 5 + + @notify_celery.task(bind=True, name="deliver_email", max_retries=48, default_retry_delay=300) @statsd(namespace="tasks") def deliver_email(self, notification_id): + notification = None try: - current_app.logger.info("Start sending email for notification id: {}".format(notification_id)) + current_app.logger.debug("Start sending email for notification id: {}".format(notification_id)) notification = notifications_dao.get_notification_by_id(notification_id) if not notification: raise NoResultFound() send_to_providers.send_email_to_provider(notification) except InvalidEmailError as e: - current_app.logger.info(f"Cannot send notification {notification_id}, got an invalid email address: {str(e)}.") + if not notification.to.isascii(): + current_app.logger.info(f"Cannot send notification {notification_id} (has a non-ascii email address): {str(e)}") + else: + current_app.logger.info(f"Cannot send notification {notification_id}, got an invalid email address: {str(e)}.") update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE) _check_and_queue_callback_task(notification) except InvalidUrlException: current_app.logger.error(f"Cannot send notification {notification_id}, got an invalid direct file url.") update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE) _check_and_queue_callback_task(notification) - except MalwarePendingException: - current_app.logger.info("RETRY: Email notification {} is pending malware scans".format(notification_id)) - self.retry(queue=QueueNames.RETRY, countdown=60) - except Exception: - try: - current_app.logger.exception("RETRY: Email notification {} failed".format(notification_id)) - self.retry(queue=QueueNames.RETRY) - except self.MaxRetriesExceededError: - message = ( - "RETRY FAILED: Max retries reached. " - "The task send_email_to_provider failed for notification {}. 
" - "Notification has been updated to technical-failure".format(notification_id) - ) - update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE) - _check_and_queue_callback_task(notification) - raise NotificationTechnicalFailureException(message) + except MalwareDetectedException: + _check_and_queue_callback_task(notification) + except MalwareScanInProgressException as me: + if self.request.retries <= SCAN_MAX_BACKOFF_RETRIES: + countdown = SCAN_RETRY_BACKOFF * (self.request.retries + 1) + else: + countdown = None + current_app.logger.warning( + "RETRY {}: Email notification {} is waiting on pending malware scanning".format(self.request.retries, notification_id) + ) + _handle_error_with_email_retry(self, me, notification_id, notification, countdown) + except Exception as e: + _handle_error_with_email_retry(self, e, notification_id, notification) def _deliver_sms(self, notification_id): @@ -102,10 +112,7 @@ def _deliver_sms(self, notification_id): except Exception: try: current_app.logger.exception("SMS notification delivery for id: {} failed".format(notification_id)) - if self.request.retries == 0: - self.retry(queue=QueueNames.RETRY, countdown=0) - else: - self.retry(queue=QueueNames.RETRY) + self.retry(**CeleryParams.retry(None if notification is None else notification.template.process_type)) except self.MaxRetriesExceededError: message = ( "RETRY FAILED: Max retries reached. The task send_sms_to_provider failed for notification {}. " @@ -114,3 +121,28 @@ def _deliver_sms(self, notification_id): update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE) _check_and_queue_callback_task(notification) raise NotificationTechnicalFailureException(message) + + +def _handle_error_with_email_retry( + task: Task, e: Exception, notification_id: int, notification: Optional[Notification], countdown: Optional[None] = None +): + try: + if task.request.retries <= 10: + current_app.logger.warning("RETRY {}: Email notification {} failed".format(task.request.retries, notification_id)) + else: + current_app.logger.exception("RETRY: Email notification {} failed".format(notification_id), exc_info=e) + # There is an edge case when a notification is not found in the database. + if notification is None or notification.template is None: + task.retry(**CeleryParams.retry(countdown=countdown)) + else: + task.retry(**CeleryParams.retry(notification.template.process_type, countdown)) + except task.MaxRetriesExceededError: + message = ( + "RETRY FAILED: Max retries reached. " + "The task send_email_to_provider failed for notification {}. 
" + "Notification has been updated to technical-failure".format(notification_id) + ) + update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE) + if notification is not None: + _check_and_queue_callback_task(notification) + raise NotificationTechnicalFailureException(message) diff --git a/app/celery/research_mode_tasks.py b/app/celery/research_mode_tasks.py index cae829f09c..cf1c013f8c 100644 --- a/app/celery/research_mode_tasks.py +++ b/app/celery/research_mode_tasks.py @@ -6,6 +6,8 @@ from app import create_uuid, notify_celery from app.aws.mocks import ( + pinpoint_delivered_callback, + pinpoint_failed_callback, ses_hard_bounce_callback, ses_notification_callback, ses_soft_bounce_callback, @@ -14,9 +16,11 @@ sns_success_callback, ) from app.aws.s3 import file_exists +from app.celery.process_pinpoint_receipts_tasks import process_pinpoint_results from app.celery.process_ses_receipts_tasks import process_ses_results from app.celery.process_sns_receipts_tasks import process_sns_results from app.config import QueueNames +from app.models import PINPOINT_PROVIDER, SNS_PROVIDER temp_fail = "+15149301633" perm_fail = "+15149301632" @@ -29,8 +33,14 @@ def send_sms_response(provider, to, reference=None): reference = reference or str(create_uuid()) - body = aws_sns_callback(reference, to) - process_sns_results.apply_async([body], queue=QueueNames.RESEARCH_MODE) + if provider == SNS_PROVIDER: + body = aws_sns_callback(reference, to) + process_sns_results.apply_async([body], queue=QueueNames.RESEARCH_MODE) + elif provider == PINPOINT_PROVIDER: + body = aws_pinpoint_callback(reference, to) + process_pinpoint_results.apply_async([body], queue=QueueNames.RESEARCH_MODE) + else: + raise ValueError("Provider {} not supported".format(provider)) return reference @@ -64,6 +74,25 @@ def aws_sns_callback(notification_id, to): return sns_success_callback(notification_id, destination=to, timestamp=timestamp) +def aws_pinpoint_callback(notification_id, to): + now = datetime.now() + timestamp = now.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + using_test_perm_fail_number = to.strip().endswith(perm_fail) + using_test_temp_fail_number = to.strip().endswith(temp_fail) + + if using_test_perm_fail_number or using_test_temp_fail_number: + return pinpoint_failed_callback( + "Phone is currently unreachable/unavailable" + if using_test_perm_fail_number + else "Phone carrier is currently unreachable/unavailable", + notification_id, + destination=to, + timestamp=timestamp, + ) + else: + return pinpoint_delivered_callback(notification_id, destination=to, timestamp=timestamp) + + @notify_celery.task( bind=True, name="create-fake-letter-response-file", diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 226d6486ad..b8626e1375 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -1,12 +1,28 @@ from datetime import datetime, timedelta +from typing import List, cast from flask import current_app from notifications_utils.statsd_decorators import statsd from sqlalchemy import and_ from sqlalchemy.exc import SQLAlchemyError -from app import notify_celery, zendesk_client -from app.celery.tasks import process_job +from app import ( + email_bulk, + email_normal, + email_priority, + notify_celery, + sms_bulk, + sms_normal, + sms_priority, + zendesk_client, +) +from app.celery.tasks import ( + job_complete, + process_job, + save_emails, + save_smss, + update_in_progress_jobs, +) from app.config import QueueNames, TaskNames from app.dao.invited_org_user_dao import 
( delete_org_invitations_created_more_than_two_days_ago, @@ -17,6 +33,7 @@ dao_get_scheduled_notifications, dao_old_letters_with_created_status, dao_precompiled_letters_still_pending_virus_check, + get_notification_count_for_job, is_delivery_slow_for_provider, notifications_not_yet_sent, set_scheduled_notification_to_processed, @@ -32,6 +49,11 @@ ) from app.notifications.process_notifications import send_notification_to_queue from app.v2.errors import JobIncompleteError +from celery import Task + +# https://stackoverflow.com/questions/63714223/correct-type-annotation-for-a-celery-task +save_smss = cast(Task, save_smss) +save_emails = cast(Task, save_emails) @notify_celery.task(name="run-scheduled-jobs") @@ -46,6 +68,29 @@ def run_scheduled_jobs(): raise +@notify_celery.task(name="mark-jobs-complete") +@statsd(namespace="tasks") +def mark_jobs_complete(): + # query for jobs that are not yet complete + jobs_not_complete = ( + Job.query.filter(Job.job_status.in_([JOB_STATUS_IN_PROGRESS, JOB_STATUS_ERROR])).order_by(Job.processing_started).all() + ) + + try: + for job in jobs_not_complete: + # check if all notifications for that job are sent + notification_count = get_notification_count_for_job(job.service_id, job.id) + + # if so, mark job as complete + if notification_count >= job.notification_count: + job_complete(job) + current_app.logger.info(f"Job ID {str(job.id)} marked as complete") + + except SQLAlchemyError: + current_app.logger.exception("Failed to mark jobs complete") + raise + + @notify_celery.task(name="send-scheduled-notifications") @statsd(namespace="tasks") def send_scheduled_notifications(): @@ -122,30 +167,31 @@ def check_job_status(): from jobs where job_status == 'in progress' and template_type in ('sms', 'email') - and scheduled_at or created_at is older that 120 minutes. + and scheduled_at or created_at is older than 30 minutes. if any results then raise error process the rows in the csv that are missing (in another task) just do the check here. """ - minutes_ago_120 = datetime.utcnow() - timedelta(minutes=120) - minutes_ago_125 = datetime.utcnow() - timedelta(minutes=125) + minutes_ago_30 = datetime.utcnow() - timedelta(minutes=30) + minutes_ago_35 = datetime.utcnow() - timedelta(minutes=35) + update_in_progress_jobs() - jobs_not_complete_after_120_minutes = ( + jobs_not_complete_after_30_minutes = ( Job.query.filter( Job.job_status == JOB_STATUS_IN_PROGRESS, and_( - minutes_ago_125 < Job.processing_started, - Job.processing_started < minutes_ago_120, + minutes_ago_35 < Job.updated_at, + Job.updated_at < minutes_ago_30, ), ) - .order_by(Job.processing_started) + .order_by(Job.updated_at) .all() ) # temporarily mark them as ERROR so that they don't get picked up by future check_job_status tasks # if they haven't been re-processed in time. 
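# Plain-Python illustration (not the SQLAlchemy query itself) of the window
# check_job_status now uses: a job is flagged only if its last update landed
# between 35 and 30 minutes ago, so each beat run inspects a five-minute slice,
# and because flagged jobs are switched to JOB_STATUS_ERROR they drop out of
# the in-progress filter on later runs. Helper name is illustrative only.
from datetime import datetime, timedelta
from typing import Optional


def updated_in_stalled_window(updated_at: datetime, now: Optional[datetime] = None) -> bool:
    now = now or datetime.utcnow()
    return now - timedelta(minutes=35) < updated_at < now - timedelta(minutes=30)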
- job_ids = [] - for job in jobs_not_complete_after_120_minutes: + job_ids: List[str] = [] # type: ignore + for job in jobs_not_complete_after_30_minutes: job.job_status = JOB_STATUS_ERROR dao_update_job(job) job_ids.append(str(job.id)) @@ -220,3 +266,122 @@ def check_templated_letter_state(): message=msg, ticket_type=zendesk_client.TYPE_INCIDENT, ) + + +@notify_celery.task(name="in-flight-to-inbox") +@statsd(namespace="tasks") +def recover_expired_notifications(): + sms_bulk.expire_inflights() + sms_normal.expire_inflights() + sms_priority.expire_inflights() + email_bulk.expire_inflights() + email_normal.expire_inflights() + email_priority.expire_inflights() + + +@notify_celery.task(name="beat-inbox-email-normal") +@statsd(namespace="tasks") +def beat_inbox_email_normal(): + """ + The function acts as a beat schedule to a list of notifications in the queue. + The post_api will push all the notifications with normal priority into the above list. + The heartbeat with check the list (list#1) until it is non-emtpy and move the notifications in a batch + to another list(list#2). The heartbeat will then call a job that saves list#2 to the DB + and actually sends the email for each notification saved. + """ + receipt_id_email, list_of_email_notifications = email_normal.poll() + + while list_of_email_notifications: + save_emails.apply_async((None, list_of_email_notifications, receipt_id_email), queue=QueueNames.NORMAL_DATABASE) + current_app.logger.info(f"Batch saving with Normal Priority: email receipt {receipt_id_email} sent to in-flight.") + receipt_id_email, list_of_email_notifications = email_normal.poll() + + +@notify_celery.task(name="beat-inbox-email-bulk") +@statsd(namespace="tasks") +def beat_inbox_email_bulk(): + """ + The function acts as a beat schedule to a list of notifications in the queue. + The post_api will push all the notifications with bulk priority into the above list. + The heartbeat with check the list (list#1) until it is non-emtpy and move the notifications in a batch + to another list(list#2). The heartbeat will then call a job that saves list#2 to the DB + and actually sends the email for each notification saved. + """ + receipt_id_email, list_of_email_notifications = email_bulk.poll() + + while list_of_email_notifications: + save_emails.apply_async((None, list_of_email_notifications, receipt_id_email), queue=QueueNames.BULK_DATABASE) + current_app.logger.info(f"Batch saving with Bulk Priority: email receipt {receipt_id_email} sent to in-flight.") + receipt_id_email, list_of_email_notifications = email_bulk.poll() + + +@notify_celery.task(name="beat-inbox-email-priority") +@statsd(namespace="tasks") +def beat_inbox_email_priority(): + """ + The function acts as a beat schedule to a list of notifications in the queue. + The post_api will push all the notifications with priority into the above list. + The heartbeat with check the list (list#1) until it is non-emtpy and move the notifications in a batch + to another list(list#2). The heartbeat will then call a job that saves list#2 to the DB + and actually sends the email for each notification saved. 
+ """ + receipt_id_email, list_of_email_notifications = email_priority.poll() + + while list_of_email_notifications: + save_emails.apply_async((None, list_of_email_notifications, receipt_id_email), queue=QueueNames.PRIORITY_DATABASE) + current_app.logger.info(f"Batch saving with Priority: email receipt {receipt_id_email} sent to in-flight.") + receipt_id_email, list_of_email_notifications = email_priority.poll() + + +@notify_celery.task(name="beat-inbox-sms-normal") +@statsd(namespace="tasks") +def beat_inbox_sms_normal(): + """ + The function acts as a beat schedule to a list of notifications in the queue. + The post_api will push all the notifications of normal priority into the above list. + The heartbeat with check the list (list#1) until it is non-emtpy and move the notifications in a batch + to another list(list#2). The heartbeat will then call a job that saves list#2 to the DB + and actually sends the sms for each notification saved. + """ + receipt_id_sms, list_of_sms_notifications = sms_normal.poll() + + while list_of_sms_notifications: + save_smss.apply_async((None, list_of_sms_notifications, receipt_id_sms), queue=QueueNames.NORMAL_DATABASE) + current_app.logger.info(f"Batch saving with Normal Priority: SMS receipt {receipt_id_sms} sent to in-flight.") + receipt_id_sms, list_of_sms_notifications = sms_normal.poll() + + +@notify_celery.task(name="beat-inbox-sms-bulk") +@statsd(namespace="tasks") +def beat_inbox_sms_bulk(): + """ + The function acts as a beat schedule to a list of notifications in the queue. + The post_api will push all the notifications of bulk priority into the above list. + The heartbeat with check the list (list#1) until it is non-emtpy and move the notifications in a batch + to another list(list#2). The heartbeat will then call a job that saves list#2 to the DB + and actually sends the sms for each notification saved. + """ + receipt_id_sms, list_of_sms_notifications = sms_bulk.poll() + + while list_of_sms_notifications: + save_smss.apply_async((None, list_of_sms_notifications, receipt_id_sms), queue=QueueNames.BULK_DATABASE) + current_app.logger.info(f"Batch saving with Bulk Priority: SMS receipt {receipt_id_sms} sent to in-flight.") + receipt_id_sms, list_of_sms_notifications = sms_bulk.poll() + + +@notify_celery.task(name="beat-inbox-sms-priority") +@statsd(namespace="tasks") +def beat_inbox_sms_priority(): + """ + The function acts as a beat schedule to a list of notifications in the queue. + The post_api will push all the notifications of priority into the above list. + The heartbeat with check the list (list#1) until it is non-emtpy and move the notifications in a batch + to another list(list#2). The heartbeat will then call a job that saves list#2 to the DB + and actually sends the sms for each notification saved. 
+ """ + receipt_id_sms, list_of_sms_notifications = sms_priority.poll() + + while list_of_sms_notifications: + save_smss.apply_async((None, list_of_sms_notifications, receipt_id_sms), queue=QueueNames.PRIORITY_DATABASE) + current_app.logger.info(f"Batch saving with Priority: SMS receipt {receipt_id_sms} sent to in-flight.") + receipt_id_sms, list_of_sms_notifications = sms_priority.poll() diff --git a/app/celery/service_callback_tasks.py b/app/celery/service_callback_tasks.py index 87f54290d7..9296958f85 100644 --- a/app/celery/service_callback_tasks.py +++ b/app/celery/service_callback_tasks.py @@ -4,20 +4,21 @@ from notifications_utils.statsd_decorators import statsd from requests import HTTPError, RequestException, request -from app import encryption, notify_celery +from app import notify_celery, signer_complaint, signer_delivery_status from app.config import QueueNames @notify_celery.task(bind=True, name="send-delivery-status", max_retries=5, default_retry_delay=300) @statsd(namespace="tasks") -def send_delivery_status_to_service(self, notification_id, encrypted_status_update): - status_update = encryption.decrypt(encrypted_status_update) +def send_delivery_status_to_service(self, notification_id, signed_status_update): + status_update = signer_delivery_status.verify(signed_status_update) data = { "id": str(notification_id), "reference": status_update["notification_client_reference"], "to": status_update["notification_to"], "status": status_update["notification_status"], + "status_description": status_update["notification_status_description"], "provider_response": status_update["notification_provider_response"], "created_at": status_update["notification_created_at"], "completed_at": status_update["notification_updated_at"], @@ -36,7 +37,7 @@ def send_delivery_status_to_service(self, notification_id, encrypted_status_upda @notify_celery.task(bind=True, name="send-complaint", max_retries=5, default_retry_delay=300) @statsd(namespace="tasks") def send_complaint_to_service(self, complaint_data): - complaint = encryption.decrypt(complaint_data) + complaint = signer_complaint.verify(complaint_data) data = { "notification_id": complaint["notification_id"], @@ -58,37 +59,32 @@ def send_complaint_to_service(self, complaint_data): def _send_data_to_service_callback_api(self, data, service_callback_url, token, function_name): notification_id = data["notification_id"] if "notification_id" in data else data["id"] try: + current_app.logger.info("{} sending {} to {}".format(function_name, notification_id, service_callback_url)) response = request( method="POST", url=service_callback_url, data=json.dumps(data), headers={ "Content-Type": "application/json", - "Authorization": "Bearer {}".format(token), + "Authorization": f"Bearer {token}", }, - timeout=60, + timeout=5, ) + current_app.logger.info( - "{} sending {} to {}, response {}".format( - function_name, - notification_id, - service_callback_url, - response.status_code, - ) + f"{function_name} sending {notification_id} to {service_callback_url}, response {response.status_code}" ) + response.raise_for_status() except RequestException as e: current_app.logger.warning( - "{} request failed for notification_id: {} and url: {}. exc: {}".format( - function_name, notification_id, service_callback_url, e - ) + f"{function_name} request failed for notification_id: {notification_id} and url: {service_callback_url}. 
exc: {e}" ) - if not isinstance(e, HTTPError) or e.response.status_code >= 500: + # Retry if the response status code is server-side or 429 (too many requests). + if not isinstance(e, HTTPError) or e.response.status_code >= 500 or e.response.status_code == 429: try: - self.retry(queue=QueueNames.RETRY) + self.retry(queue=QueueNames.CALLBACKS_RETRY) except self.MaxRetriesExceededError: current_app.logger.warning( - "Retry: {} has retried the max num of times for callback url {} and notification_id: {}".format( - function_name, service_callback_url, notification_id - ) + "Retry: {function_name} has retried the max num of times for callback url {service_callback_url} and notification_id: {notification_id}" ) diff --git a/app/celery/tasks.py b/app/celery/tasks.py index 371267b12e..64f8dd669d 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -1,11 +1,17 @@ import json -from collections import defaultdict, namedtuple +from collections import namedtuple from datetime import datetime -from typing import Optional +from itertools import islice +from typing import Any, Dict, List, Optional +from uuid import UUID, uuid4 from flask import current_app -from notifications_utils.columns import Row -from notifications_utils.recipients import RecipientCSV +from itsdangerous import BadSignature +from more_itertools import chunked +from notifications_utils.recipients import ( + RecipientCSV, + try_validate_and_format_phone_number, +) from notifications_utils.statsd_decorators import statsd from notifications_utils.template import SMSMessageTemplate, WithSubjectTemplate from notifications_utils.timezones import convert_utc_to_local_timezone @@ -14,31 +20,35 @@ from app import ( DATETIME_FORMAT, - create_random_identifier, + bounce_rate_client, create_uuid, - encryption, + email_bulk, + email_normal, + email_priority, + metrics_logger, notify_celery, + signer_notification, + sms_bulk, + sms_normal, + sms_priority, statsd_client, ) from app.aws import s3 -from app.celery import ( # noqa: F401 - letters_pdf_tasks, - process_sns_receipts_tasks, - provider_tasks, - research_mode_tasks, +from app.aws.metrics import ( + put_batch_saving_bulk_created, + put_batch_saving_bulk_processed, ) -from app.config import QueueNames -from app.dao.daily_sorted_letter_dao import dao_create_or_update_daily_sorted_letter +from app.config import Config, Priorities, QueueNames from app.dao.inbound_sms_dao import dao_get_inbound_sms_by_id -from app.dao.jobs_dao import dao_get_job_by_id, dao_update_job +from app.dao.jobs_dao import dao_get_in_progress_jobs, dao_get_job_by_id, dao_update_job from app.dao.notifications_dao import ( dao_get_last_notification_added_for_job_id, dao_get_notification_history_by_reference, - dao_update_notifications_by_reference, + get_latest_sent_notification_for_job, get_notification_by_id, - update_notification_status_by_reference, + total_hard_bounces_grouped_by_hour, + total_notifications_grouped_by_hour, ) -from app.dao.provider_details_dao import get_current_provider from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id from app.dao.service_inbound_api_dao import get_service_inbound_api_for_service from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id @@ -47,9 +57,10 @@ fetch_todays_total_message_count, ) from app.dao.templates_dao import dao_get_template_by_id -from app.exceptions import DVLAException, NotificationTechnicalFailureException +from app.encryption import SignedNotification +from app.exceptions import DVLAException from app.models import ( - 
DVLA_RESPONSE_STATUS_SENT, + BULK, EMAIL_TYPE, JOB_STATUS_CANCELLED, JOB_STATUS_FINISHED, @@ -58,25 +69,35 @@ JOB_STATUS_SENDING_LIMITS_EXCEEDED, KEY_TYPE_NORMAL, LETTER_TYPE, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_RETURNED_LETTER, - NOTIFICATION_SENDING, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_TEMPORARY_FAILURE, + NORMAL, + PRIORITY, SMS_TYPE, - DailySortedLetter, Job, + Notification, Service, Template, ) from app.notifications.process_notifications import ( - persist_notification, + persist_notifications, send_notification_to_queue, ) -from app.notifications.validators import check_service_over_daily_message_limit -from app.service.utils import service_allowed_to_send_to -from app.utils import get_csv_max_rows +from app.types import VerifiedNotification +from app.utils import get_csv_max_rows, get_delivery_queue_for_template +from app.v2.errors import ( + LiveServiceTooManyRequestsError, + LiveServiceTooManySMSRequestsError, + TrialServiceTooManyRequestsError, + TrialServiceTooManySMSRequestsError, +) + + +def update_in_progress_jobs(): + jobs = dao_get_in_progress_jobs() + for job in jobs: + notification = get_latest_sent_notification_for_job(job.id) + if notification is not None: + job.updated_at = notification.updated_at + dao_update_job(job) @notify_celery.task(name="process-job") @@ -111,14 +132,18 @@ def process_job(job_id): TemplateClass = get_template_class(db_template.template_type) template = TemplateClass(db_template.__dict__) + template.process_type = db_template.process_type - current_app.logger.debug("Starting job {} processing {} notifications".format(job_id, job.notification_count)) + current_app.logger.info("Starting job {} processing {} notifications".format(job_id, job.notification_count)) csv = get_recipient_csv(job, template) - for row in csv.get_rows(): - process_row(row, template, job, service) - job_complete(job, start=start) + rows = csv.get_rows() + for result in chunked(rows, Config.BATCH_INSERTION_CHUNK_SIZE): + process_rows(result, template, job, service) + put_batch_saving_bulk_created( + metrics_logger, 1, notification_type=db_template.template_type, priority=db_template.process_type + ) def job_complete(job: Job, resumed=False, start=None): @@ -136,47 +161,47 @@ def job_complete(job: Job, resumed=False, start=None): ) -def process_row(row: Row, template: Template, job: Job, service: Service): +def process_rows(rows: List, template: Template, job: Job, service: Service): template_type = template.template_type - encrypted = encryption.encrypt( - { - "api_key": job.api_key_id and str(job.api_key_id), - "template": str(template.id), - "template_version": job.template_version, - "job": str(job.id), - "to": row.recipient, - "row_number": row.index, - "personalisation": dict(row.personalisation), - "queue": queue_to_use(job.notification_count), - } - ) - - notification_id = create_uuid() - sender_id = str(job.sender_id) if job.sender_id else None - - send_fns = {SMS_TYPE: save_sms, EMAIL_TYPE: save_email, LETTER_TYPE: save_letter} - - send_fn = send_fns[template_type] - - task_kwargs = {} - if sender_id: - task_kwargs["sender_id"] = sender_id + encrypted_smss: List[SignedNotification] = [] + encrypted_emails: List[SignedNotification] = [] + for row in rows: + client_reference = row.get("reference", None) + signed_row = SignedNotification( + signer_notification.sign( + { + "api_key": job.api_key_id and str(job.api_key_id), # type: ignore + "key_type": job.api_key.key_type if job.api_key else KEY_TYPE_NORMAL, + "template": 
str(template.id), + "template_version": job.template_version, + "job": str(job.id), + "to": row.recipient, + "row_number": row.index, + "personalisation": dict(row.personalisation), + "queue": choose_sending_queue(str(template.process_type), template_type, job.notification_count), + "sender_id": sender_id, + "client_reference": client_reference.data, # will return None if missing + } + ) + ) + if template_type == SMS_TYPE: + encrypted_smss.append(signed_row) + if template_type == EMAIL_TYPE: + encrypted_emails.append(signed_row) # the same_sms and save_email task are going to be using template and service objects from cache # these objects are transient and will not have relationships loaded - if service_allowed_to_send_to(row.recipient, service, KEY_TYPE_NORMAL): - send_fn.apply_async( - ( - str(service.id), - notification_id, - encrypted, - ), - task_kwargs, - queue=QueueNames.DATABASE if not service.research_mode else QueueNames.RESEARCH_MODE, + if encrypted_smss: + save_smss.apply_async( + (str(service.id), encrypted_smss, None), + queue=choose_database_queue(str(template.process_type), service.research_mode, job.notification_count), + ) + if encrypted_emails: + save_emails.apply_async( + (str(service.id), encrypted_emails, None), + queue=choose_database_queue(str(template.process_type), service.research_mode, job.notification_count), ) - else: - current_app.logger.debug("SMS {} failed as restricted service".format(notification_id)) def __sending_limits_for_job_exceeded(service, job: Job, job_id): @@ -193,236 +218,296 @@ def __sending_limits_for_job_exceeded(service, job: Job, job_id): return False -@notify_celery.task(bind=True, name="save-sms", max_retries=5, default_retry_delay=300) +@notify_celery.task(bind=True, name="save-smss", max_retries=5, default_retry_delay=300) @statsd(namespace="tasks") -def save_sms(self, service_id, notification_id, encrypted_notification, sender_id=None): - notification = encryption.decrypt(encrypted_notification) - service = dao_fetch_service_by_id(service_id, use_cache=True) - template = dao_get_template_by_id(notification["template"], version=notification["template_version"], use_cache=True) - - if sender_id: - reply_to_text = dao_get_service_sms_senders_by_id(service_id, sender_id).sms_sender - if isinstance(template, tuple): - template = template[0] - # if the template is obtained from cache a tuple will be returned where - # the first element is the Template object and the second the template cache data - # in the form of a dict - elif isinstance(template, tuple): - reply_to_text = template[1].get("reply_to_text") - template = template[0] - else: - reply_to_text = template.get_reply_to_text() +def save_smss(self, service_id: Optional[str], signed_notifications: List[SignedNotification], receipt: Optional[UUID]): + """ + Function that takes a list of signed notifications, stores + them in the DB and then sends these to the queue. If the receipt + is not None then it is passed to the RedisQueue to let it know it + can delete the inflight notifications. 
+ """ + verified_notifications: List[VerifiedNotification] = [] + notification_id_queue: Dict = {} + saved_notifications: List[Notification] = [] + for signed_notification in signed_notifications: + try: + _notification = signer_notification.verify(signed_notification) + except BadSignature: + current_app.logger.exception(f"Invalid signature for signed_notification {signed_notification}") + raise + service_id = _notification.get("service_id", service_id) # take it it out of the notification if it's there + service = dao_fetch_service_by_id(service_id, use_cache=True) + + template = dao_get_template_by_id( + _notification.get("template"), version=_notification.get("template_version"), use_cache=True + ) + # todo: _notification may not have "sender_id" key + sender_id = _notification.get("sender_id") # type: ignore + notification_id = _notification.get("id", create_uuid()) - # if the service is obtained from cache a tuple will be returned where - # the first element is the Service object and the second the service cache data - # in the form of a dict - if isinstance(service, tuple): - service = service[0] + if "reply_to_text" in _notification and _notification["reply_to_text"]: + reply_to_text = _notification["reply_to_text"] + else: + reply_to_text = "" # type: ignore + if sender_id: + reply_to_text = try_validate_and_format_phone_number( + dao_get_service_sms_senders_by_id(service_id, sender_id).sms_sender + ) + elif template.service: + reply_to_text = template.get_reply_to_text() + else: + reply_to_text = service.get_default_sms_sender() # type: ignore + + notification: VerifiedNotification = { + **_notification, # type: ignore + "notification_id": notification_id, + "reply_to_text": reply_to_text, + "service": service, + "key_type": _notification.get("key_type", KEY_TYPE_NORMAL), + "template_id": template.id, + "template_version": template.version, + "recipient": _notification.get("to"), + "personalisation": _notification.get("personalisation"), + "notification_type": SMS_TYPE, # type: ignore + "simulated": _notification.get("simulated", None), + "api_key_id": _notification.get("api_key", None), + "created_at": datetime.utcnow(), + "job_id": _notification.get("job", None), + "job_row_number": _notification.get("row_number", None), + } - check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service) + verified_notifications.append(notification) + notification_id_queue[notification_id] = notification.get("queue") # type: ignore + process_type = template.process_type # type: ignore try: - # this task is used by two main things... 
process_job and process_sms_or_email_notification - # if the data is not present in the encrypted data then fallback on whats needed for process_job - saved_notification = persist_notification( - notification_id=notification.get("id", notification_id), - template_id=notification["template"], - template_version=notification["template_version"], - recipient=notification["to"], - service=service, - personalisation=notification.get("personalisation"), - notification_type=SMS_TYPE, - simulated=notification.get("simulated", None), - api_key_id=notification.get("api_key", None), - key_type=notification.get("key_type", KEY_TYPE_NORMAL), - created_at=datetime.utcnow(), - job_id=notification.get("job", None), - job_row_number=notification.get("row_number", None), - reply_to_text=reply_to_text, - ) - - send_notification_to_queue( - saved_notification, - service.research_mode, - queue=notification.get("queue") or template.queue_to_use(), - ) - + # If the data is not present in the encrypted data then fallback on whats needed for process_job. + saved_notifications = persist_notifications(verified_notifications) current_app.logger.debug( - "SMS {} created at {} for job {}".format( - saved_notification.id, - saved_notification.created_at, - notification.get("job", None), - ) + f"Saved following notifications into db: {notification_id_queue.keys()} associated with receipt {receipt}" ) + if receipt: + acknowledge_receipt(SMS_TYPE, process_type, receipt) + current_app.logger.debug( + f"Batch saving: receipt_id {receipt} removed from buffer queue for notification_id {notification_id} for process_type {process_type}" + ) + else: + put_batch_saving_bulk_processed( + metrics_logger, + 1, + notification_type=SMS_TYPE, + priority=process_type, # type: ignore + ) except SQLAlchemyError as e: - handle_exception(self, notification, notification_id, e) - - -@notify_celery.task(bind=True, name="save-email", max_retries=5, default_retry_delay=300) -@statsd(namespace="tasks") -def save_email(self, service_id, notification_id, encrypted_notification, sender_id=None): - notification = encryption.decrypt(encrypted_notification) - service = dao_fetch_service_by_id(service_id, use_cache=True) - template = dao_get_template_by_id(notification["template"], version=notification["template_version"], use_cache=True) - - if sender_id: - reply_to_text = dao_get_reply_to_by_id(service_id, sender_id).email_address - if isinstance(template, tuple): - template = template[0] - # if the template is obtained from cache a tuple will be returned where - # the first element is the Template object and the second the template cache data - # in the form of a dict - elif isinstance(template, tuple): - reply_to_text = template[1].get("reply_to_text") - template = template[0] - else: - reply_to_text = template.get_reply_to_text() + signed_and_verified = list(zip(signed_notifications, verified_notifications)) + handle_batch_error_and_forward(self, signed_and_verified, SMS_TYPE, e, receipt, template) - # if the service is obtained from cache a tuple will be returned where - # the first element is the Service object and the second the service cache data - # in the form of a dict - if isinstance(service, tuple): - service = service[0] - - check_service_over_daily_message_limit(notification.get("key_type", KEY_TYPE_NORMAL), service) - - try: - # this task is used by two main things... 
process_job and process_sms_or_email_notification - # if the data is not present in the encrypted data then fallback on whats needed for process_job - saved_notification = persist_notification( - notification_id=notification.get("id", notification_id), - template_id=notification["template"], - template_version=notification["template_version"], - recipient=notification["to"], - service=service, - personalisation=notification.get("personalisation"), - notification_type=EMAIL_TYPE, - api_key_id=notification.get("api_key", None), - key_type=notification.get("key_type", KEY_TYPE_NORMAL), - created_at=datetime.utcnow(), - job_id=notification.get("job", None), - simulated=notification.get("simulated", None), - job_row_number=notification.get("row_number", None), - reply_to_text=reply_to_text, - client_reference=notification.get("client_reference", None), - ) - send_notification_to_queue( - saved_notification, - service.research_mode, - queue=notification.get("queue") or template.queue_to_use(), - ) - - current_app.logger.debug("Email {} created at {}".format(saved_notification.id, saved_notification.created_at)) - except SQLAlchemyError as e: - handle_exception(self, notification, notification_id, e) + current_app.logger.debug(f"Sending following sms notifications to AWS: {notification_id_queue.keys()}") + for notification_obj in saved_notifications: + try: + queue = notification_id_queue.get(notification_obj.id) or get_delivery_queue_for_template(template) + send_notification_to_queue( + notification_obj, + service.research_mode, + queue=queue, + ) + current_app.logger.debug( + "SMS {} created at {} for job {}".format( + notification_obj.id, + notification_obj.created_at, + notification_obj.job, + ) + ) + except (LiveServiceTooManySMSRequestsError, TrialServiceTooManySMSRequestsError) as e: + current_app.logger.info(f"{e.message}: SMS {notification_obj.id} not created") -@notify_celery.task(bind=True, name="save-letter", max_retries=5, default_retry_delay=300) +@notify_celery.task(bind=True, name="save-emails", max_retries=5, default_retry_delay=300) @statsd(namespace="tasks") -def save_letter( - self, - service_id, - notification_id, - encrypted_notification, -): - notification = encryption.decrypt(encrypted_notification) - - # we store the recipient as just the first item of the person's address - recipient = notification["personalisation"]["addressline1"] - - service = dao_fetch_service_by_id(service_id) - template = dao_get_template_by_id(notification["template"], version=notification["template_version"]) +def save_emails(self, _service_id: Optional[str], signed_notifications: List[SignedNotification], receipt: Optional[UUID]): + """ + Function that takes a list of signed notifications, stores + them in the DB and then sends these to the queue. If the receipt + is not None then it is passed to the RedisQueue to let it know it + can delete the inflight notifications. 
+ """ + verified_notifications: List[VerifiedNotification] = [] + notification_id_queue: Dict = {} + saved_notifications: List[Notification] = [] + for signed_notification in signed_notifications: + try: + _notification = signer_notification.verify(signed_notification) + except BadSignature: + current_app.logger.exception(f"Invalid signature for signed_notification {signed_notification}") + raise + service_id = _notification.get("service_id", _service_id) # take it it out of the notification if it's there + service = dao_fetch_service_by_id(service_id, use_cache=True) + template = dao_get_template_by_id( + _notification.get("template"), version=_notification.get("template_version"), use_cache=True + ) + # todo: _notification does not have key "sender_id" + sender_id = _notification.get("sender_id") # type: ignore + notification_id = _notification.get("id", create_uuid()) + reply_to_text = "" # type: ignore + if ( + "reply_to_text" in _notification and _notification["reply_to_text"] + ): # first just see if we already have a value of this and use it, otherwise continue with the logic below + reply_to_text = _notification["reply_to_text"] # type: ignore + else: + if sender_id: + reply_to_text = dao_get_reply_to_by_id(service_id, sender_id).email_address + elif template.service: + reply_to_text = template.get_reply_to_text() # type: ignore + else: + reply_to_text = service.get_default_reply_to_email_address() + + notification: VerifiedNotification = { + **_notification, # type: ignore + "notification_id": notification_id, + "reply_to_text": reply_to_text, + "service": service, + "key_type": _notification.get("key_type", KEY_TYPE_NORMAL), + "template_id": template.id, + "template_version": template.version, + "recipient": _notification.get("to"), + "personalisation": _notification.get("personalisation"), + "notification_type": EMAIL_TYPE, # type: ignore + "simulated": _notification.get("simulated", None), + "api_key_id": _notification.get("api_key", None), + "created_at": datetime.utcnow(), + "job_id": _notification.get("job", None), + "job_row_number": _notification.get("row_number", None), + } - check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service) + verified_notifications.append(notification) + notification_id_queue[notification_id] = notification.get("queue") # type: ignore + process_type = template.process_type try: - # if we don't want to actually send the letter, then start it off in SENDING so we don't pick it up - status = NOTIFICATION_CREATED if not service.research_mode else NOTIFICATION_SENDING - - saved_notification = persist_notification( - template_id=notification["template"], - template_version=notification["template_version"], - template_postage=template.postage, - recipient=recipient, - service=service, - personalisation=notification["personalisation"], - notification_type=LETTER_TYPE, - api_key_id=notification.get("api_key", None), - key_type=KEY_TYPE_NORMAL, - created_at=datetime.utcnow(), - job_id=notification["job"], - job_row_number=notification["row_number"], - notification_id=notification_id, - reference=create_random_identifier(), - reply_to_text=template.get_reply_to_text(), - status=status, + # If the data is not present in the encrypted data then fallback on whats needed for process_job + saved_notifications = persist_notifications(verified_notifications) + current_app.logger.debug( + f"Saved following notifications into db: {notification_id_queue.keys()} associated with receipt {receipt}" ) - - if not service.research_mode: - 
send_notification_to_queue(saved_notification, service.research_mode) - elif current_app.config["NOTIFY_ENVIRONMENT"] in ["preview", "development"]: - research_mode_tasks.create_fake_letter_response_file.apply_async( - (saved_notification.reference,), queue=QueueNames.RESEARCH_MODE + if receipt: + # todo: fix this potential bug + # template is whatever it was set to last in the for loop above + # at this point in the code we have a list of notifications (saved_notifications) + # which could use multiple templates + acknowledge_receipt(EMAIL_TYPE, process_type, receipt) + current_app.logger.debug( + f"Batch saving: receipt_id {receipt} removed from buffer queue for notification_id {notification_id} for process_type {process_type}" ) else: - update_notification_status_by_reference(saved_notification.reference, "delivered") - - current_app.logger.debug("Letter {} created at {}".format(saved_notification.id, saved_notification.created_at)) + put_batch_saving_bulk_processed( + metrics_logger, + 1, + notification_type=EMAIL_TYPE, + priority=process_type, # type: ignore + ) except SQLAlchemyError as e: - handle_exception(self, notification, notification_id, e) - - -@notify_celery.task(bind=True, name="update-letter-notifications-to-sent") -@statsd(namespace="tasks") -def update_letter_notifications_to_sent_to_dvla(self, notification_references): - # This task will be called by the FTP app to update notifications as sent to DVLA - provider = get_current_provider(LETTER_TYPE) - - updated_count, _ = dao_update_notifications_by_reference( - notification_references, - { - "status": NOTIFICATION_SENDING, - "sent_by": provider.identifier, - "sent_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), - }, - ) + signed_and_verified = list(zip(signed_notifications, verified_notifications)) + handle_batch_error_and_forward(self, signed_and_verified, EMAIL_TYPE, e, receipt, template) - current_app.logger.info("Updated {} letter notifications to sending".format(updated_count)) + if saved_notifications: + try_to_send_notifications_to_queue(notification_id_queue, service, saved_notifications, template) -@notify_celery.task(bind=True, name="update-letter-notifications-to-error") -@statsd(namespace="tasks") -def update_letter_notifications_to_error(self, notification_references): - # This task will be called by the FTP app to update notifications as sent to DVLA +def try_to_send_notifications_to_queue(notification_id_queue, service, saved_notifications, template): + """ + Loop through saved_notifications, check if the service has hit their daily rate limit, + and if not, call send_notification_to_queue on notification + """ + current_app.logger.debug(f"Sending following email notifications to AWS: {notification_id_queue.keys()}") + # todo: fix this potential bug + # service is whatever it was set to last in the for loop above. 
+ # at this point in the code we have a list of notifications (saved_notifications) + # which could be from multiple services + research_mode = service.research_mode # type: ignore + for notification_obj in saved_notifications: + try: + queue = notification_id_queue.get(notification_obj.id) or get_delivery_queue_for_template(template) + send_notification_to_queue( + notification_obj, + research_mode, + queue, + ) - updated_count, _ = dao_update_notifications_by_reference( - notification_references, - {"status": NOTIFICATION_TECHNICAL_FAILURE, "updated_at": datetime.utcnow()}, - ) - message = "Updated {} letter notifications to technical-failure with references {}".format( - updated_count, notification_references - ) - raise NotificationTechnicalFailureException(message) + current_app.logger.debug( + "Email {} created at {} for job {}".format( + notification_obj.id, + notification_obj.created_at, + notification_obj.job, + ) + ) + except (LiveServiceTooManyRequestsError, TrialServiceTooManyRequestsError) as e: + current_app.logger.info(f"{e.message}: Email {notification_obj.id} not created") -def handle_exception(task, notification, notification_id, exc): - if not get_notification_by_id(notification_id): - retry_msg = "{task} notification for job {job} row number {row} and notification id {noti}".format( - task=task.__name__, - job=notification.get("job", None), - row=notification.get("row_number", None), - noti=notification_id, - ) +def handle_batch_error_and_forward( + task: Any, + signed_and_verified: list[tuple[Any, Any]], + notification_type: Optional[str], + exception, + receipt: Optional[UUID] = None, + template: Any = None, +): + if receipt: + current_app.logger.warning(f"Batch saving: could not persist notifications with receipt {receipt}: {str(exception)}") + else: + current_app.logger.warning(f"Batch saving: could not persist notifications: {str(exception)}") + process_type = template.process_type if template else None + + notifications_in_job: List[str] = [] + for signed, notification in signed_and_verified: + notification_id = notification["notification_id"] + notifications_in_job.append(notification_id) + service = notification["service"] # Sometimes, SQS plays the same message twice. We should be able to catch an IntegrityError, but it seems # SQLAlchemy is throwing a FlushError. So we check if the notification id already exists then do not # send to the retry queue. 
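# A minimal, self-contained sketch (not part of this diff) of the duplicate-delivery guard the
# comment above describes: before re-queueing a failed notification for an individual save, first
# check whether it was already persisted, since SQS can deliver the same message twice.
# `lookup` and `requeue` are hypothetical stand-ins for get_notification_by_id and the
# save_emails / save_smss retry tasks; this is an illustration, not the repository's code.
from typing import Callable, Iterable, Optional, Tuple


def forward_unsaved_notifications(
    signed_and_verified: Iterable[Tuple[dict, dict]],
    lookup: Callable[[str], Optional[object]],
    requeue: Callable[[dict], None],
) -> int:
    """Re-queue only the notifications that are not already in the database."""
    forwarded = 0
    for signed, verified in signed_and_verified:
        if lookup(verified["notification_id"]) is None:  # not saved yet, safe to retry
            requeue(signed)
            forwarded += 1
        # otherwise the row already exists (duplicate SQS delivery), so skip it
    return forwarded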
- current_app.logger.exception("Retry" + retry_msg) - try: - task.retry(queue=QueueNames.RETRY, exc=exc) - except task.MaxRetriesExceededError: - current_app.logger.error("Max retry failed" + retry_msg) + found = get_notification_by_id(notification_id) + if not found and service: + forward_msg = "Batch saving: forwarding notification {notif} to individual save from receipt {receipt}.".format( + notif=notification_id, + receipt=receipt, + ) + current_app.logger.info(forward_msg) + save_fn = save_emails if notification_type == EMAIL_TYPE else save_smss + + template = dao_get_template_by_id( + notification.get("template_id"), notification.get("template_version"), use_cache=True + ) + process_type = template.process_type + retry_msg = "{task} notification for job {job} row number {row} and notification id {notif} and max_retries are {max_retry}".format( + task=task.__name__, + job=notification.get("job", None), + row=notification.get("row_number", None), + notif=notification_id, + max_retry=task.max_retries, + ) + current_app.logger.warning("Retry " + retry_msg) + try: + # If >1 notification has failed, we want to make individual + # tasks to retry those notifications. + if len(signed_and_verified) > 1: + save_fn.apply_async( + (service.id, [signed], None), + queue=choose_database_queue(str(template.process_type), service.research_mode, notifications_count=1), + ) + current_app.logger.warning("Made a new task to retry") + else: + current_app.logger.warning("Retrying the current task") + task.retry(queue=QueueNames.RETRY, exc=exception) + except task.MaxRetriesExceededError: + current_app.logger.error("Max retry failed" + retry_msg) + + # end of the loop, purge the notifications from the buffer queue: + if receipt: + acknowledge_receipt(notification_type, process_type, receipt) + current_app.logger.info(f"Acknowledged notification ids: {str(notifications_in_job)} for receipt: {str(receipt)}") def get_template_class(template_type): @@ -434,41 +519,6 @@ def get_template_class(template_type): return WithSubjectTemplate -@notify_celery.task(bind=True, name="update-letter-notifications-statuses") -@statsd(namespace="tasks") -def update_letter_notifications_statuses(self, filename): - notification_updates = parse_dvla_file(filename) - - temporary_failures = [] - - for update in notification_updates: - check_billable_units(update) - update_letter_notification(filename, temporary_failures, update) - if temporary_failures: - # This will alert Notify that DVLA was unable to deliver the letters, we need to investigate - message = "DVLA response file: {filename} has failed letters with notification.reference {failures}".format( - filename=filename, failures=temporary_failures - ) - raise DVLAException(message) - - -@notify_celery.task(bind=True, name="record-daily-sorted-counts") -@statsd(namespace="tasks") -def record_daily_sorted_counts(self, filename): - sorted_letter_counts = defaultdict(int) - notification_updates = parse_dvla_file(filename) - for update in notification_updates: - sorted_letter_counts[update.cost_threshold.lower()] += 1 - - unknown_status = sorted_letter_counts.keys() - {"unsorted", "sorted"} - if unknown_status: - message = "DVLA response file: {} contains unknown Sorted status {}".format(filename, unknown_status.__repr__()) - raise DVLAException(message) - - billing_date = get_billing_date_in_est_from_filename(filename) - persist_daily_sorted_letter_counts(day=billing_date, file_name=filename, sorted_letter_counts=sorted_letter_counts) - - def parse_dvla_file(filename): 
bucket_location = "{}-ftp".format(current_app.config["NOTIFY_EMAIL_DOMAIN"]) response_file_content = s3.get_s3_file(bucket_location, filename) @@ -486,44 +536,12 @@ def get_billing_date_in_est_from_filename(filename): return convert_utc_to_local_timezone(datetime_obj).date() -def persist_daily_sorted_letter_counts(day, file_name, sorted_letter_counts): - daily_letter_count = DailySortedLetter( - billing_day=day, - file_name=file_name, - unsorted_count=sorted_letter_counts["unsorted"], - sorted_count=sorted_letter_counts["sorted"], - ) - dao_create_or_update_daily_sorted_letter(daily_letter_count) - - def process_updates_from_file(response_file): NotificationUpdate = namedtuple("NotificationUpdate", ["reference", "status", "page_count", "cost_threshold"]) notification_updates = [NotificationUpdate(*line.split("|")) for line in response_file.splitlines()] return notification_updates -def update_letter_notification(filename, temporary_failures, update): - if update.status == DVLA_RESPONSE_STATUS_SENT: - status = NOTIFICATION_DELIVERED - else: - status = NOTIFICATION_TEMPORARY_FAILURE - temporary_failures.append(update.reference) - - updated_count, _ = dao_update_notifications_by_reference( - references=[update.reference], - update_dict={"status": status, "updated_at": datetime.utcnow()}, - ) - - if not updated_count: - msg = ( - "Update letter notification file {filename} failed: notification either not found " - "or already updated from delivered. Status {status} for notification reference {reference}".format( - filename=filename, status=status, reference=update.reference - ) - ) - current_app.logger.info(msg) - - def check_billable_units(notification_update): notification = dao_get_notification_history_by_reference(notification_update.reference) @@ -597,7 +615,6 @@ def process_incomplete_jobs(job_ids): # reset the processing start time so that the check_job_status scheduled task doesn't pick this job up again for job in jobs: - job.job_status = JOB_STATUS_IN_PROGRESS job.processing_started = datetime.utcnow() dao_update_job(job) @@ -612,9 +629,9 @@ def process_incomplete_job(job_id): last_notification_added = dao_get_last_notification_added_for_job_id(job_id) if last_notification_added: - resume_from_row = last_notification_added.job_row_number + resume_from_row = last_notification_added.job_row_number + 1 else: - resume_from_row = -1 # The first row in the csv with a number is row 0 + resume_from_row = 0 # no rows have been added yet, resume from row 0 current_app.logger.info("Resuming job {} from row {}".format(job_id, resume_from_row)) @@ -622,16 +639,40 @@ def process_incomplete_job(job_id): TemplateClass = get_template_class(db_template.template_type) template = TemplateClass(db_template.__dict__) + template.process_type = db_template.process_type csv = get_recipient_csv(job, template) - for row in csv.get_rows(): - if row.index > resume_from_row: - process_row(row, template, job, job.service) + rows = csv.get_rows() # This returns an iterator + for result in chunked(islice(rows, resume_from_row, None), Config.BATCH_INSERTION_CHUNK_SIZE): + process_rows(result, template, job, job.service) + put_batch_saving_bulk_created( + metrics_logger, 1, notification_type=db_template.template_type, priority=db_template.process_type + ) + - job_complete(job, resumed=True) +def choose_database_queue(process_type: str, research_mode: bool, notifications_count: int) -> str: + # Research mode is a special case, it always goes to the research mode queue. 
+ if research_mode: + return QueueNames.RESEARCH_MODE + + # We redirect first to a queue depending on its notification' size. + large_csv_threshold = current_app.config["CSV_BULK_REDIRECT_THRESHOLD"] + if notifications_count >= large_csv_threshold: + return QueueNames.BULK_DATABASE + # Don't switch to normal queue if it's already set to priority queue. + elif process_type == BULK: + return QueueNames.NORMAL_DATABASE + else: + # If the size isn't a concern, fall back to the template's process type. + if process_type == PRIORITY: + return QueueNames.PRIORITY_DATABASE + elif process_type == BULK: + return QueueNames.BULK_DATABASE + else: + return QueueNames.NORMAL_DATABASE -def queue_to_use(notifications_count: int) -> Optional[str]: +def choose_sending_queue(process_type: str, notif_type: str, notifications_count: int) -> Optional[str]: """Determine which queue to use depending on given parameters. We only check one rule at the moment: if the CSV file is big enough, @@ -639,51 +680,57 @@ def queue_to_use(notifications_count: int) -> Optional[str]: notifications that are transactional in nature. """ large_csv_threshold = current_app.config["CSV_BULK_REDIRECT_THRESHOLD"] - return QueueNames.BULK if notifications_count > large_csv_threshold else None - - -@notify_celery.task(name="process-returned-letters-list") -@statsd(namespace="tasks") -def process_returned_letters_list(notification_references): - updated, updated_history = dao_update_notifications_by_reference( - notification_references, {"status": NOTIFICATION_RETURNED_LETTER} - ) - - current_app.logger.info( - "Updated {} letter notifications ({} history notifications, from {} references) to returned-letter".format( - updated, updated_history, len(notification_references) - ) - ) + # Default to the pre-configured template's process type. + queue: Optional[str] = process_type + + if notifications_count >= large_csv_threshold: + queue = QueueNames.DELIVERY_QUEUES[notif_type][Priorities.LOW] + # If priority is slow/bulk, but lower than threshold, let's make it + # faster by switching to normal queue. + elif process_type == BULK: + queue = QueueNames.DELIVERY_QUEUES[notif_type][Priorities.MEDIUM] + else: + # If the size isn't a concern, fall back to the template's process type. + queue = QueueNames.DELIVERY_QUEUES[notif_type][Priorities.to_lmh(process_type)] + return queue @notify_celery.task(bind=True, name="send-notify-no-reply", max_retries=5) @statsd(namespace="tasks") def send_notify_no_reply(self, data): + """Sends no-reply emails to people replying back to GCNotify. + + This task will be fed by the AWS lambda code ses_receiving_emails. 
+ https://github.com/cds-snc/notification-lambdas/blob/fd508d9718cef715f9297fedd8d780bc4bae0051/sesreceivingemails/ses_receiving_emails.py + """ payload = json.loads(data) service = dao_fetch_service_by_id(current_app.config["NOTIFY_SERVICE_ID"]) template = dao_get_template_by_id(current_app.config["NO_REPLY_TEMPLATE_ID"]) try: - saved_notification = persist_notification( - template_id=template.id, - template_version=template.version, - recipient=payload["sender"], - service=service, - personalisation={"sending_email_address": payload["recipients"][0]}, - notification_type=template.template_type, - api_key_id=None, - key_type=KEY_TYPE_NORMAL, - # Ensure that the reply to is not set, if people reply - # to these emails, they will go to the GC Notify service - # email address, and we handle those on the SES inbound - # Lambda - reply_to_text=None, - ) - - send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) - except Exception: + data_to_send = [ + dict( + template_id=template.id, + template_version=template.version, + recipient=payload["sender"], + service=service, + personalisation={"sending_email_address": payload["recipients"][0]}, + notification_type=template.template_type, + api_key_id=None, + key_type=KEY_TYPE_NORMAL, + # Ensure that the reply to is not set, if people reply + # to these emails, they will go to the GC Notify service + # email address, and we handle those on the SES inbound + # Lambda + reply_to_text=None, + ) + ] + saved_notifications = persist_notifications(data_to_send) + send_notification_to_queue(saved_notifications[0], False, queue=QueueNames.NOTIFY) + except Exception as e: try: + current_app.logger.warning(f"The exception is {repr(e)}") self.retry(queue=QueueNames.RETRY) except self.MaxRetriesExceededError: current_app.logger.error( @@ -700,3 +747,91 @@ def get_recipient_csv(job: Job, template: Template) -> RecipientCSV: placeholders=template.placeholders, max_rows=get_csv_max_rows(job.service_id), ) + + +def acknowledge_receipt(notification_type: Any, process_type: Any, receipt: UUID): # noqa + """ + Acknowledge the notification has been saved to the DB and sent to the service. 
+ + Args: + notification_type: str + Type of notification being sent; either SMS_TYPE or EMAIL_TYPE + template: model.Template + Template used to send notification + + Returns: None + """ + queue_for = { + (SMS_TYPE, PRIORITY): sms_priority, + (SMS_TYPE, NORMAL): sms_normal, + (SMS_TYPE, BULK): sms_bulk, + (EMAIL_TYPE, PRIORITY): email_priority, + (EMAIL_TYPE, NORMAL): email_normal, + (EMAIL_TYPE, BULK): email_bulk, + } + queue = queue_for.get((notification_type, process_type)) + if queue is None: + raise ValueError( + f"acknowledge_receipt: No queue found for receipt {receipt} notification type {notification_type} and process type {process_type}" + ) + if queue.acknowledge(receipt): + return + + current_app.logger.warning(f"acknowledge_receipt: trying to acknowledge inflight everywhere for receipt {receipt}") + if ( + sms_priority.acknowledge(receipt) + or sms_normal.acknowledge(receipt) + or sms_bulk.acknowledge(receipt) + or email_priority.acknowledge(receipt) + or email_normal.acknowledge(receipt) + or email_bulk.acknowledge(receipt) + ): + return + else: + current_app.logger.warning(f"acknowledge_receipt: receipt {receipt} not found in any queue") + + +@notify_celery.task(name="seed-bounce-rate-in-redis") +@statsd(namespace="tasks") +def seed_bounce_rate_in_redis(service_id: str, interval: int = 24): + """ + Function to seed both the total_notifications and total_hard_bounces in Redis for a given service + over a given interval (default 24 hours) + + Args: + service_id (str): The service id to seed bounce rate for + interval: The number of hours to seed bounce rate for + """ + if bounce_rate_client.get_seeding_started(service_id) is False: + current_app.logger.info("Clear all data for current service {}".format(service_id)) + bounce_rate_client.clear_bounce_rate_data(service_id) + current_app.logger.info("Set seeding flag to True for service {}".format(service_id)) + bounce_rate_client.set_seeding_started(service_id) + else: + current_app.logger.info("Bounce rate already seeded for service_id {}".format(service_id)) + return + + current_app.logger.info("Seeding bounce rate for service_id {}".format(service_id)) + total_seeded_notifications = total_notifications_grouped_by_hour(service_id, interval=interval) + total_seeded_hard_bounces = total_hard_bounces_grouped_by_hour(service_id, interval=interval) + + for hour, total_notifications in total_seeded_notifications: + # set the timestamp to the start of the hour + 1 second to ensure the notification + # will be counted in the correct hour + hour_timestamp_s = int(hour.timestamp()) + 1 + # generate a list of tuples of (UUID, timestamp) that will be used to seed Redis + email_data = [(str(uuid4()), hour_timestamp_s) for _ in range(total_notifications)] + email_data_dict = dict(email_data) + bounce_rate_client.set_notifications_seeded(service_id, email_data_dict) + current_app.logger.info(f"Seeded total notification data for service {service_id} in Redis") + + for hour, total_hard_bounces in total_seeded_hard_bounces: + # set the timestamp to the start of the hour + 1 second to ensure the notification + # will be counted in the correct hour + hour_timestamp_s = int(hour.timestamp()) + 1 + # generate a list of tuples of (UUID, timestamp) that will be used to seed Redis + bounce_data = [(str(uuid4()), hour_timestamp_s) for _ in range(total_hard_bounces)] + bounce_data_dict = dict(bounce_data) + bounce_rate_client.set_hard_bounce_seeded(service_id, bounce_data_dict) + + current_app.logger.info(f"Seeded hard bounce data for service 
{service_id} in Redis") diff --git a/app/celery/utils.py b/app/celery/utils.py new file mode 100644 index 0000000000..f849e8f02a --- /dev/null +++ b/app/celery/utils.py @@ -0,0 +1,43 @@ +from typing import Any, Dict, Optional + +from flask import current_app + +from app import config, models + +# Default retry periods for sending notifications. +RETRY_DEFAULT = 300 +RETRY_HIGH = 25 + + +class CeleryParams(object): + # Important to load from the object and not the module to avoid + # circular imports, back and forth between the app and celery modules. + + RETRY_PERIODS = { + models.BULK: RETRY_DEFAULT, + models.NORMAL: RETRY_DEFAULT, + models.PRIORITY: RETRY_HIGH, + None: RETRY_HIGH, # In case we cannot identify the priority, treat it as high. + } + + @staticmethod + def retry(notification_process_type: Optional[str] = None, countdown: Optional[int] = None) -> Dict[str, Any]: + """ + Build task params for the sending parameter retry tasks. + + If the notification is a high priority SMS, set the retry policy to retry every 25 seconds + else fall back to the default retry policy of retrying every 5 minutes. + + Provide an override parameter for cases the calling task wants to override the retry policy. + """ + params: dict[str, Any] = {"queue": config.QueueNames.RETRY} + if current_app.config["FF_CELERY_CUSTOM_TASK_PARAMS"] is False: + return params + + if countdown is not None: + params["countdown"] = countdown + else: + # Overring the retry policy is only supported for SMS for now; + # email support coming later. + params["countdown"] = CeleryParams.RETRY_PERIODS[notification_process_type] + return params diff --git a/app/clients/document_download.py b/app/clients/document_download.py index 1491ef1284..5908c880e5 100644 --- a/app/clients/document_download.py +++ b/app/clients/document_download.py @@ -50,3 +50,16 @@ def upload_document(self, service_id, personalisation_key): raise error return response.json() + + def check_scan_verdict(self, service_id, document_id, sending_method): + url = f"{self.api_host}/services/{service_id}/documents/{document_id}/scan-verdict" + response = requests.post( + url, + headers={ + "Authorization": "Bearer {}".format(self.auth_token), + }, + data={ + "sending_method": sending_method, + }, + ) + return response diff --git a/app/clients/email/aws_ses.py b/app/clients/email/aws_ses.py index a1d71b546e..624f0f1fbe 100644 --- a/app/clients/email/aws_ses.py +++ b/app/clients/email/aws_ses.py @@ -7,7 +7,7 @@ import botocore from flask import current_app from notifications_utils.recipients import InvalidEmailError -from unidecode import unidecode +from notifications_utils.statsd_decorators import statsd from app.clients.email import EmailClient, EmailClientException @@ -30,6 +30,7 @@ def init_app(self, region, statsd_client, *args, **kwargs): def get_name(self): return self.name + @statsd(namespace="clients.ses") def send_email( self, source, @@ -61,7 +62,7 @@ def attach_html(m, content): attachments = attachments or [] if isinstance(to_addresses, str): to_addresses = [to_addresses] - source = unidecode(source) + reply_to_addresses = [reply_to_address] if reply_to_address else [] # - If sending a TXT email without attachments: @@ -105,7 +106,7 @@ def attach_html(m, content): # http://docs.aws.amazon.com/ses/latest/DeveloperGuide/api-error-codes.html if e.response["Error"]["Code"] == "InvalidParameterValue": - raise InvalidEmailError('email: "{}" message: "{}"'.format(to_addresses[0], e.response["Error"]["Message"])) + raise InvalidEmailError(f'message: 
"{e.response["Error"]["Message"]}"') else: self.statsd_client.incr("clients.ses.error") raise AwsSesClientException(str(e)) diff --git a/app/clients/freshdesk.py b/app/clients/freshdesk.py index 1fc63fc546..785096af5a 100644 --- a/app/clients/freshdesk.py +++ b/app/clients/freshdesk.py @@ -6,6 +6,14 @@ from flask import current_app from requests.auth import HTTPBasicAuth +from app.config import QueueNames +from app.dao.services_dao import dao_fetch_service_by_id +from app.dao.templates_dao import dao_get_template_by_id +from app.models import KEY_TYPE_NORMAL +from app.notifications.process_notifications import ( + persist_notification, + send_notification_to_queue, +) from app.user.contact_request import ContactRequest __all__ = ["Freshdesk"] @@ -17,7 +25,21 @@ def __init__(self, contact: ContactRequest): def _generate_description(self): message = self.contact.message - if self.contact.is_go_live_request(): + if self.contact.is_demo_request(): + message = "
\n\n".join(
+                [
+                    f"- user: {self.contact.name} {self.contact.email_address}",
+                    f"- department/org: {self.contact.department_org_name}",
+                    f"- program/service: {self.contact.program_service_name}",
+                    f"- intended recipients: {self.contact.intended_recipients}",
+                    f"- main use case: {self.contact.main_use_case}",
+                    f"- main use case details: {self.contact.main_use_case_details}",
+                ]
+            )
+        elif self.contact.is_go_live_request():
+            # the ">" character breaks rendering for the freshdesk preview in slack
+            if self.contact.department_org_name:
+                self.contact.department_org_name = self.contact.department_org_name.replace(">", "/")
             message = "\n".join(
+                [
+                    f"{self.contact.service_name} just requested to go live.",
@@ -37,15 +59,38 @@ def _generate_description(self):
                 f"A new logo has been uploaded by {self.contact.name} ({self.contact.email_address}) for the following service:",
                 f"- Service id: {self.contact.service_id}",
                 f"- Service name: {self.contact.service_name}",
+                    f"- Organisation id: {self.contact.organisation_id}",
+                    f"- Organisation name: {self.contact.department_org_name}",
                 f"- Logo filename: {self.contact.branding_url}",
+                    f"- Logo name: {self.contact.branding_logo_name}",
+                    f"- Alt text english: {self.contact.alt_text_en}",
+                    f"- Alt text french: {self.contact.alt_text_fr}",
                 "\n",
                 f"Un nouveau logo a été téléchargé par {self.contact.name} ({self.contact.email_address}) pour le service suivant :",
                 f"- Identifiant du service : {self.contact.service_id}",
                 f"- Nom du service : {self.contact.service_name}",
+                    f"- Identifiant de l'organisation: {self.contact.organisation_id}",
+                    f"- Nom de l'organisation: {self.contact.department_org_name}",
                 f"- Nom du fichier du logo : {self.contact.branding_url}",
+                    f"- Nom du logo : {self.contact.branding_logo_name}",
+                    f"- Texte alternatif anglais : {self.contact.alt_text_en}",
+                    f"- Texte alternatif français : {self.contact.alt_text_fr}",
+                ]
+            )
+        elif self.contact.is_new_template_category_request():
+            message = "\n".join(
+                [
+                    f"New template category request from {self.contact.name} ({self.contact.email_address}):",
+                    f"- Service id: {self.contact.service_id}",
+                    f"- New Template Category Request name: {self.contact.template_category_name_en}",
+                    f"- Template id request: {self.contact.template_id_link}",
+                    "\n",
+                    f"Demande de nouvelle catégorie de modèle de {self.contact.name} ({self.contact.email_address}):",
+                    f"- Identifiant du service: {self.contact.service_id}",
+                    f"- Nom de la nouvelle catégorie de modèle demandée: {self.contact.template_category_name_fr}",
+                    f"- Demande d'identifiant de modèle: {self.contact.template_id_link}",
                 ]
             )
-        if len(self.contact.user_profile):
             message += f"\n\n---\n\n
{self.contact.user_profile}" @@ -72,18 +117,50 @@ def send_ticket(self) -> int: if not api_url: raise NotImplementedError - # The API and field definitions are defined here: - # https://developer.zendesk.com/rest_api/docs/support/tickets - response = requests.post( - urljoin(api_url, "/api/v2/tickets"), - json=self._generate_ticket(), - auth=HTTPBasicAuth(current_app.config["FRESH_DESK_API_KEY"], "x"), - timeout=5, - ) - response.raise_for_status() - - return response.status_code + if current_app.config["FRESH_DESK_ENABLED"] is True: + # The API and field definitions are defined here: + # https://developer.zendesk.com/rest_api/docs/support/tickets + response = requests.post( + urljoin(api_url, "/api/v2/tickets"), + json=self._generate_ticket(), + auth=HTTPBasicAuth(current_app.config["FRESH_DESK_API_KEY"], "x"), + timeout=5, + ) + response.raise_for_status() + if response.status_code == 201: + current_app.logger.info( + f"Created Freshdesk ticket for service: {self.contact.service_id} type: {self.contact.support_type}" + ) + return response.status_code + else: + return 201 except requests.RequestException as e: - content = json.loads(response.content) - current_app.logger.error(f"Failed to create Freshdesk ticket: {content['errors']}") - raise e + current_app.logger.error(f"Failed to create Freshdesk ticket: {e}") + self.email_freshdesk_ticket(self._generate_ticket()) + return 201 + + def email_freshdesk_ticket(self, content: dict) -> None: + try: + template = dao_get_template_by_id(current_app.config["CONTACT_FORM_DIRECT_EMAIL_TEMPLATE_ID"]) + notify_service = dao_fetch_service_by_id(current_app.config["NOTIFY_SERVICE_ID"]) + + if current_app.config["CONTACT_FORM_EMAIL_ADDRESS"] is None: + current_app.logger.info("Cannot email contact us form, CONTACT_FORM_EMAIL_ADDRESS is empty") + else: + current_app.logger.info("Emailing contact us form to {}".format(current_app.config["CONTACT_FORM_EMAIL_ADDRESS"])) + saved_notification = persist_notification( + template_id=template.id, + template_version=template.version, + recipient=current_app.config["CONTACT_FORM_EMAIL_ADDRESS"], + service=notify_service, + personalisation={ + "contact_us_content": json.dumps(content, indent=4), + }, + notification_type=template.template_type, + api_key_id=None, + key_type=KEY_TYPE_NORMAL, + reply_to_text=notify_service.get_default_reply_to_email_address(), + ) + send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) + except Exception as e: + current_app.logger.exception(f"Failed to email contact form {json.dumps(content, indent=4)}, error: {e}") diff --git a/app/clients/mlwr/mlwr.py b/app/clients/mlwr/mlwr.py deleted file mode 100644 index d257c48240..0000000000 --- a/app/clients/mlwr/mlwr.py +++ /dev/null @@ -1,10 +0,0 @@ -from assemblyline_client import Client -from flask import current_app - - -def check_mlwr_score(sid): - client = Client( - current_app.config["MLWR_HOST"], - apikey=(current_app.config["MLWR_USER"], current_app.config["MLWR_KEY"]), - ) - return client.submission(sid) diff --git a/tests/app/letters/__init__.py b/app/clients/salesforce/__init__.py similarity index 100% rename from tests/app/letters/__init__.py rename to app/clients/salesforce/__init__.py diff --git a/app/clients/salesforce/salesforce_account.py b/app/clients/salesforce/salesforce_account.py new file mode 100644 index 0000000000..ec942b9259 --- /dev/null +++ b/app/clients/salesforce/salesforce_account.py @@ -0,0 +1,52 @@ +from typing import Optional + +from simple_salesforce import Salesforce + +from 
.salesforce_utils import query_one, query_param_sanitize + +ORG_NOTES_ORG_NAME_INDEX = 0 +ORG_NOTES_OTHER_NAME_INDEX = 1 + + +def get_org_name_from_notes(organisation_notes: str, name_index: int = ORG_NOTES_ORG_NAME_INDEX) -> str: + """Given a service's organisation notes, returns either the organisation name or + organisation other name. The organisation notes structure is as follows: + + ORG_NAME > ORG_OTHER_NAME + + If the `>` character is not found, the entire organisation notes is returned. + + TODO: This could be improved by explicitly passing the selected Account name + to the API from Admin rather than parsing it out of the organisation notes. + + Args: + organisation_notes (str): The service's organisation notes + name_index (int): The index of the name to return. Defaults to 0 (organisation name). + + Returns: + str: The organisation name or organisation other name. + """ + note_parts = organisation_notes.split(">") if isinstance(organisation_notes, str) else [] + if len(note_parts) > name_index: + return note_parts[name_index].strip() + return organisation_notes + + +def get_account_id_from_name(session: Optional[Salesforce], account_name: str, generic_account_id: str) -> Optional[str]: + """Returns the Account ID for the given Account Name. If no match is found, a generic + Account not found ID is returned. + + Args: + session (Salesforce): Salesforce session for the operation. + account_name (str): Account name to lookup the ID for. + generic_account_id (str): Generic Account ID to return if no match is found. + + Returns: + Optional[str]: The matching Account ID or a generic Account ID if no match is found. + """ + result = None + if isinstance(account_name, str) and account_name.strip() != "": + account_name_sanitized = query_param_sanitize(account_name) + query = f"SELECT Id FROM Account where Name = '{account_name_sanitized}' OR CDS_AccountNameFrench__c = '{account_name_sanitized}' LIMIT 1" + result = query_one(session, query) + return result.get("Id") if result else generic_account_id diff --git a/app/clients/salesforce/salesforce_auth.py b/app/clients/salesforce/salesforce_auth.py new file mode 100644 index 0000000000..3fbcdbdf3b --- /dev/null +++ b/app/clients/salesforce/salesforce_auth.py @@ -0,0 +1,61 @@ +from typing import Optional + +import requests +from flask import current_app +from simple_salesforce import Salesforce + +SALESFORCE_TIMEOUT_SECONDS = 3 + + +class TimeoutAdapter(requests.adapters.HTTPAdapter): + """Custom adapter to add a timeout to Salesforce API requests""" + + def send(self, *args, **kwargs): + kwargs["timeout"] = SALESFORCE_TIMEOUT_SECONDS + return super().send(*args, **kwargs) + + +def get_session(client_id: str, username: str, password: str, security_token: str, domain: str) -> Optional[Salesforce]: + """Return an authenticated Salesforce session + + Args: + client_id (str): The name of the Salesforce connected app. + username (str): The username to use for authentication. This users permissions will be used for the session. + password (str): The password of the user that is authenticating. + security_token (str): The security token of the user that is authenticating. + domain (str): The domain of the Salesforce instance. Use `test` for the QA instance. + + Returns: + Salesforce: the authenticated Salesforce session. 
+ """ + session = None + try: + # Add a timeout to Salesforce API requests + requests_session = requests.Session() + requests_session.mount("https://", TimeoutAdapter()) + requests_session.mount("http://", TimeoutAdapter()) + + session = Salesforce( + client_id=client_id, + username=username, + password=password, + security_token=security_token, + domain=domain, + session=requests_session, + ) + except Exception as ex: + current_app.logger.error(f"SF_ERR Salesforce login failed: {ex}") + return session + + +def end_session(session: Optional[Salesforce]): + """Logout of a Salesforce session + + Args: + session (Salesforce): The session to revoke. + """ + try: + if session and session.session_id: + session.oauth2("revoke", {"token": session.session_id}, method="POST") + except Exception as ex: + current_app.logger.error(f"SF_ERR Salesforce logout failed: {ex}") diff --git a/app/clients/salesforce/salesforce_client.py b/app/clients/salesforce/salesforce_client.py new file mode 100644 index 0000000000..e320645df9 --- /dev/null +++ b/app/clients/salesforce/salesforce_client.py @@ -0,0 +1,160 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional, Tuple + +from simple_salesforce import Salesforce + +from . import ( + salesforce_account, + salesforce_auth, + salesforce_contact, + salesforce_engagement, + salesforce_utils, +) + +if TYPE_CHECKING: + from app.models import Service, User + + +class SalesforceClient: + def init_app(self, app): + self.client_id = app.config["SALESFORCE_CLIENT_ID"] + self.username = app.config["SALESFORCE_USERNAME"] + self.password = app.config["SALESFORCE_PASSWORD"] + self.security_token = app.config["SALESFORCE_SECURITY_TOKEN"] + self.domain = app.config["SALESFORCE_DOMAIN"] + self.generic_account_id = app.config["SALESFORCE_GENERIC_ACCOUNT_ID"] + + # + # Authentication + # + def get_session(self) -> Optional[Salesforce]: + """Returns an authenticated Salesforce session. + + Returns: + Salesforce: The authenticated Salesforce session. + """ + return salesforce_auth.get_session(self.client_id, self.username, self.password, self.security_token, self.domain) + + def end_session(self, session: Optional[Salesforce]) -> None: + """Revokes a Salesforce session. + + Args: + session (Salesforce): The Salesforce session to revoke. + """ + salesforce_auth.end_session(session) + + # + # Contacts + # + def contact_create(self, user: User) -> None: + """Creates a Salesforce Contact for the given Notify user + + Args: + user (User): The Notify user to create a Salesforce Contact for. + """ + session = self.get_session() + salesforce_contact.create(session, user, {}) + self.end_session(session) + + def contact_update(self, user: User) -> None: + """Updates a Salesforce Contact for the given Notify user. If the Contact does not exist, it is created. + + Args: + user (User): The Notify user to update the Salesforce Contact for. + """ + session = self.get_session() + name_parts = salesforce_utils.get_name_parts(user.name) + user_updates = { + "FirstName": name_parts["first"], + "LastName": name_parts["last"], + "Email": user.email_address, + } + salesforce_contact.update(session, user, user_updates) + self.end_session(session) + + def contact_update_account_id( + self, session: Optional[Salesforce], service: Service, user: User + ) -> Tuple[Optional[str], Optional[str]]: + """Updates the Account ID for the given Notify user's Salesforce Contact. The Salesforce Account ID + and Contact ID are returned. 
+ + Args: + session (Salesforce): The Salesforce session to use for the operation. + service (Service): The Notify service to retrieve the account from. + user (User): The Notify user to update the Salesforce Contact for. If a contact does not exist, one will be created. + """ + account_name = salesforce_account.get_org_name_from_notes(service.organisation_notes) + account_id = salesforce_account.get_account_id_from_name(session, account_name, self.generic_account_id) + contact_id = salesforce_contact.update(session, user, {"AccountId": account_id}) + return account_id, contact_id + + # + # Engagements + # + def engagement_create(self, service: Service, user: User) -> None: + """Creates a Salesforce Engagement for the given Notify service. The Engagement will + be associated with the Notify user that created the Notify service. + + Args: + service (Service): Notify Service to create an Engagement for. + user (User): Notify User creating the service. + """ + session = self.get_session() + account_id, contact_id = self.contact_update_account_id(session, service, user) + salesforce_engagement.create(session, service, {}, account_id, contact_id) + self.end_session(session) + + def engagement_update(self, service: Service, user: User, field_updates: dict[str, str]) -> None: + """Updates a Salesforce Engagement for the given Notify service. The Engagement + will be associated with the Notify user that triggers the stage update. + + Args: + service (Service): Notify Service to update an Engagement for. + user (User): Notify User creating the service. + field_updates (dict[str, str]): The fields to update on the Engagement. + """ + session = self.get_session() + account_id, contact_id = self.contact_update_account_id(session, service, user) + salesforce_engagement.update(session, service, field_updates, account_id, contact_id) + self.end_session(session) + + def engagement_close(self, service: Service) -> None: + """Closes a Salesforce Engagement for the given Notify service. + + Args: + service (Service): Notify Service to close an Engagement for. + """ + session = self.get_session() + engagement = salesforce_engagement.get_engagement_by_service_id(session, str(service.id)) + if engagement: + close_update = { + "CDS_Close_Reason__c": "Service deleted by user", + "StageName": salesforce_engagement.ENGAGEMENT_STAGE_CLOSED, + } + salesforce_engagement.update(session, service, close_update, None, None) + self.end_session(session) + + def engagement_add_contact_role(self, service: Service, user: User) -> None: + """Adds a Salesforce ContactRole to an Engagement. + + Args: + service (Service): Notify Service that will have its Engagement's ContactRoles updated. + user (User): Notify User being added as a ContactRole. + """ + session = self.get_session() + account_id, contact_id = self.contact_update_account_id(session, service, user) + salesforce_engagement.contact_role_add(session, service, account_id, contact_id) + self.end_session(session) + + def engagement_delete_contact_role(self, service: Service, user: User) -> None: + """Deletes a Salesforce ContactRole from an Engagement. + + Args: + service (Service): Notify Service that will have its Engagement's ContactRoles updated. + user (User): Notify User being deleted as a ContactRole. 
+ """ + session = self.get_session() + account_id, contact_id = self.contact_update_account_id(session, service, user) + salesforce_engagement.contact_role_delete(session, service, account_id, contact_id) + self.end_session(session) diff --git a/app/clients/salesforce/salesforce_contact.py b/app/clients/salesforce/salesforce_contact.py new file mode 100644 index 0000000000..a982c39c93 --- /dev/null +++ b/app/clients/salesforce/salesforce_contact.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional + +from flask import current_app +from simple_salesforce import Salesforce + +from .salesforce_utils import ( + get_name_parts, + parse_result, + query_one, + query_param_sanitize, +) + +if TYPE_CHECKING: + from app.models import User + + +def create(session: Optional[Salesforce], user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: + """Create a Salesforce Contact from the given Notify User + + Args: + session (Salesforce): Salesforce session used to perform the operation. + user (User): Notify User that has just been activated. + field_updates (Optional[dict[str, str]]): Custom values used to override any default values. + + Returns: + Optional[str]: Newly created Contact ID or None if the operation failed. + """ + contact_id = None + try: + name_parts = get_name_parts(user.name) + field_default_values = { + "FirstName": name_parts["first"], + "LastName": name_parts["last"], + "Title": "created by Notify API", + "CDS_Contact_ID__c": str(user.id), + "Email": user.email_address, + } + field_values = field_default_values | field_updates + result = session.Contact.create( # type: ignore + field_values, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + parse_result(result, f"Salesforce Contact create for '{user.email_address}'") + contact_id = result.get("id") + + except Exception as ex: + current_app.logger.error(f"SF_ERR Salesforce Contact create failed: {ex}") + return contact_id + + +def update(session: Optional[Salesforce], user: User, field_updates: dict[str, Optional[str]]) -> Optional[str]: + """Update a Contact's details. If the Contact does not exist, it is created. + + Args: + session (Salesforce): Salesforce session used to perform the operation. + user (User): Notify User object for the linked Contact to update + field_updates (dict[str, Optional[str]]): The contact fields to update. + + Returns: + contact_id (Optional[str]): ID of the updated Contact or None if the operation failed + """ + contact_id = None + try: + contact = get_contact_by_user_id(session, str(user.id)) + + # Existing contact, update the AccountID + if contact: + result = session.Contact.update( # type:ignore + str(contact.get("Id")), field_updates, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"} + ) + parse_result(result, f"Salesforce Contact update '{user.email_address}' with '{field_updates}'") + contact_id = contact.get("Id") + # Create the new contact + else: + contact_id = create(session, user, field_updates) + + except Exception as ex: + current_app.logger.error(f"SF_ERR Salesforce Contact update failed: {ex}") + return contact_id + + +def get_contact_by_user_id(session: Optional[Salesforce], user_id: str) -> Optional[dict[str, str]]: + """Retrieve a Salesforce Contact by their Notify user ID. If + they can't be found, `None` is returned. + + Args: + session (Salesforce): Salesforce session used to perform the operation. + user_id (str): Notify user ID. 
+ + Returns: + Optional[dict[str, str]]: Salesforce Contact details or None if can't be found + """ + result = None + if isinstance(user_id, str) and user_id.strip(): + query = f"SELECT Id, FirstName, LastName, AccountId FROM Contact WHERE CDS_Contact_ID__c = '{query_param_sanitize(user_id)}' LIMIT 1" + result = query_one(session, query) + return result diff --git a/app/clients/salesforce/salesforce_engagement.py b/app/clients/salesforce/salesforce_engagement.py new file mode 100644 index 0000000000..3462ceae7b --- /dev/null +++ b/app/clients/salesforce/salesforce_engagement.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +from datetime import datetime +from typing import TYPE_CHECKING, Any, Dict, Optional + +from flask import current_app +from simple_salesforce import Salesforce + +from .salesforce_account import ORG_NOTES_OTHER_NAME_INDEX, get_org_name_from_notes +from .salesforce_utils import parse_result, query_one, query_param_sanitize + +if TYPE_CHECKING: + from app.models import Service + +ENGAGEMENT_PRODUCT = "GC Notify" +ENGAGEMENT_TEAM = "Platform" +ENGAGEMENT_TYPE = "New Business" +ENGAGEMENT_STAGE_ACTIVATION = "Activation" +ENGAGEMENT_STAGE_CLOSED = "Closed" +ENGAGEMENT_STAGE_LIVE = "Live" +ENGAGEMENT_STAGE_TRIAL = "Trial Account" + + +def create( + session: Optional[Salesforce], + service: Service, + field_updates: dict[str, str], + account_id: Optional[str], + contact_id: Optional[str], +) -> Optional[str]: + """Create a Salesforce Engagement for the given Notify service + + Args: + session (Salesforce): Salesforce session to perform the operation. + service (Service): The service's details for the engagement. + field_updates (Optional[dict[str, str]]): Custom values used to override any default values. + account_id (Optional[str]): Salesforce Account ID to associate with the Engagement. + contact_id (Optional[str]): Salesforce Contact ID to associate with the Engagement. + + Returns: + Optional[str]: Newly created Engagement ID or None if the operation failed. 
+ """ + engagement_id = None + try: + if account_id and contact_id and session: + # Default Engagement values, which can be overridden by passing in field_updates + field_default_values = { + "Name": service.name, + "AccountId": account_id, + "ContactId": contact_id, + "CDS_Opportunity_Number__c": str(service.id), + "Notify_Organization_Other__c": get_org_name_from_notes(service.organisation_notes, ORG_NOTES_OTHER_NAME_INDEX), + "CloseDate": datetime.today().strftime("%Y-%m-%d"), + "RecordTypeId": current_app.config["SALESFORCE_ENGAGEMENT_RECORD_TYPE"], + "StageName": ENGAGEMENT_STAGE_TRIAL, + "Type": ENGAGEMENT_TYPE, + "CDS_Lead_Team__c": ENGAGEMENT_TEAM, + "Product_to_Add__c": ENGAGEMENT_PRODUCT, + } + field_values = field_default_values | field_updates + result = session.Opportunity.create( # type: ignore + engagement_maxlengths(field_values), + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + parse_result(result, f"Salesforce Engagement create for service ID {service.id}") + engagement_id = result.get("id") + + # Create the Product association + if engagement_id: + result = session.OpportunityLineItem.create( # type: ignore + { + "OpportunityId": engagement_id, + "PricebookEntryId": current_app.config["SALESFORCE_ENGAGEMENT_STANDARD_PRICEBOOK_ID"], + "Product2Id": current_app.config["SALESFORCE_ENGAGEMENT_PRODUCT_ID"], + "Quantity": 1, + "UnitPrice": 0, + }, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + parse_result(result, f"Salesforce Engagement OpportunityLineItem create for service ID {service.id}") + else: + current_app.logger.error( + f"SF_ERR Salesforce Engagement create failed: missing Account ID '{account_id}' or Contact ID '{contact_id}' for service ID {service.id} or the session is not available. '{session}'" + ) + except Exception as ex: + current_app.logger.error(f"SF_ERR Salesforce Engagement create failed: {ex}") + return engagement_id + + +def update( + session: Optional[Salesforce], + service: Service, + field_updates: dict[str, str], + account_id: Optional[str], + contact_id: Optional[str], +) -> Optional[str]: + """Update an Engagement. If the Engagement does not exist, it is created. + + Args: + session (Salesforce): Salesforce session to perform the operation. + service (Service): The service's details for the engagement. + field_updates (dict[str, str]): The engagement fields to update. + account_id (Optional[str]): Salesforce Account ID to associate with the Engagement. + contact_id (Optional[str]): Salesforce Contact ID to associate with the Engagement. + + Returns: + Optional[str]: Updated Engagement ID or None if the operation failed. + """ + engagement_id = None + try: + engagement = get_engagement_by_service_id(session, str(service.id)) + + # Existing Engagement, update the stage name + if engagement: + result = session.Opportunity.update( # type: ignore + str(engagement.get("Id")), + engagement_maxlengths(field_updates), + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + is_updated = parse_result(result, f"Salesforce Engagement update '{service}' with '{field_updates}'") + engagement_id = engagement.get("Id") if is_updated else None + # Create the Engagement. This handles Notify services that were created before Salesforce was added. 
+ else: + engagement_id = create(session, service, field_updates, account_id, contact_id) + + except Exception as ex: + current_app.logger.error(f"SF_ERR Salesforce Engagement update failed: {ex}") + return engagement_id + + +def contact_role_add( + session: Optional[Salesforce], service: Service, account_id: Optional[str], contact_id: Optional[str] +) -> None: + """Adds an Engagement ContactRole based on the provided Notify service and Contact. If the + Engagement does not exist, it is created. + + Args: + session (Salesforce): Salesforce session to perform the operation. + service (Service): The service's details for the engagement. + account_id (Optional[str]): Salesforce Account ID to associate with the Engagement. + contact_id (Optional[str]): Salesforce Contact ID for the Engagement's ContactRole. + + Returns: + None + """ + try: + engagement = get_engagement_by_service_id(session, str(service.id)) + if engagement: + result = session.OpportunityContactRole.create( # type: ignore + {"ContactId": contact_id, "OpportunityId": engagement.get("Id")}, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + parse_result(result, f"Salesforce ContactRole add for {contact_id} with '{service.id}'") + else: + create(session, service, {}, account_id, contact_id) # This implicitly creates the ContactRole + except Exception as ex: + current_app.logger.error(f"SF_ERR Salesforce ContactRole add for {contact_id} with '{service.id}' failed: {ex}") + + +def contact_role_delete( + session: Optional[Salesforce], service: Service, account_id: Optional[str], contact_id: Optional[str] +) -> None: + """Deletes an Engagement ContactRole based on the provided Notify service and Salesforce Contact. + If the Engagement does not exist, it is created. + + Args: + session (Salesforce): Salesforce session to perform the operation. + service (Service): The service's details for the engagement. + account_id (Optional[str]): Salesforce Account ID to associate with the Engagement. + contact_id (Optional[str]): Salesforce Contact ID to remove as a ContactRole. + + Returns: + None + """ + try: + result: Dict[str, Any] = {} + engagement = get_engagement_by_service_id(session, str(service.id)) + engagement_id = engagement.get("Id") if engagement else create(session, service, {}, account_id, contact_id) + engagement_contact_role = get_engagement_contact_role(session, engagement_id, contact_id) + + if engagement_contact_role: + result = session.OpportunityContactRole.delete(engagement_contact_role.get("Id")) # type: ignore + parse_result(result, f"Salesforce ContactRole delete for {contact_id} with '{service.id}'") + except Exception as ex: + current_app.logger.error(f"SF_ERR Salesforce ContactRole delete for {contact_id} with '{service.id}' failed: {ex}") + + +def get_engagement_by_service_id(session: Optional[Salesforce], service_id: str) -> Optional[dict[str, Any]]: + """Retrieve a Salesforce Engagement by a Notify service ID + + Args: + session (Salesforce): Salesforce session to perform the operation. 
+ service_id (str): Notify service ID + + Returns: + Optional[dict[str, str]]: Salesforce Engagement details or None if can't be found + """ + result = None + if isinstance(service_id, str) and service_id.strip() and session is not None: + query = f"SELECT Id, Name, ContactId, AccountId FROM Opportunity where CDS_Opportunity_Number__c = '{query_param_sanitize(service_id)}' LIMIT 1" + result = query_one(session, query) + return result + + +def get_engagement_contact_role( + session: Optional[Salesforce], engagement_id: Optional[str], contact_id: Optional[str] +) -> Optional[dict[str, Any]]: + """Retrieve a Salesforce Engagement ContactRole. + + Args: + session (Salesforce): Salesforce session to perform the operation. + engagement_id (str): Salesforce Engagement ID + contact_id (str): Salesforce Contact ID + + Returns: + Optional[dict[str, str]]: Salesforce Engagement ContactRole details or None if can't be found + """ + result = None + if isinstance(engagement_id, str) and engagement_id.strip() and isinstance(contact_id, str) and contact_id.strip(): + query = f"SELECT Id, OpportunityId, ContactId FROM OpportunityContactRole WHERE OpportunityId = '{query_param_sanitize(engagement_id)}' AND ContactId = '{query_param_sanitize(contact_id)}' LIMIT 1" + result = query_one(session, query) + return result + + +def engagement_maxlengths(fields: dict[str, str]) -> dict[str, str]: + """Given a dictionary of Engagement fields to update, truncate the values to the maximum length allowed by Salesforce. + + Args: + field_updates (dict[str, str]): Engagement fields to check + + Returns: + dict[str, str]: Field updates with values truncated to the maximum length allowed by Salesforce + """ + maxlengths = { + "Name": 120, + } + for field_name, maxlength in maxlengths.items(): + if field_name in fields: + fields[field_name] = fields[field_name][:maxlength] + return fields diff --git a/app/clients/salesforce/salesforce_utils.py b/app/clients/salesforce/salesforce_utils.py new file mode 100644 index 0000000000..0cb666112a --- /dev/null +++ b/app/clients/salesforce/salesforce_utils.py @@ -0,0 +1,82 @@ +from typing import Any, Optional + +from flask import current_app +from simple_salesforce import Salesforce + + +def get_name_parts(full_name: str) -> dict[str, str]: + """ + Splits a space separated fullname into first and last + name parts. If the name cannot be split, the first name will + be blank and the last name will be set to the passed in full name. + + This is because Salesforce requires a last name but allows the + last name to be blank. + + Args: + full_name (str): The space seperated full name + + Returns: + dict[str, str]: The first and last name parts + """ + name_parts = full_name.split() + return { + "first": name_parts[0] if len(name_parts) > 1 else "", + "last": " ".join(name_parts[1:]) if len(name_parts) > 1 else full_name, + } + + +def query_one(session: Optional[Salesforce], query: str) -> Optional[dict[str, Any]]: + """Execute an SOQL query that expects to return a single record. 
+ + Args: + query (str): The SOQL query to execute + session (Salesforce): Authenticated Salesforce session + + Returns: + dict[str, Any]: The result of the query or None + """ + result = None + try: + if session is not None: + results = session.query(query) + if results.get("totalSize") == 1: + result = results.get("records")[0] + else: + current_app.logger.warn(f"SF_WARN Salesforce no results found for query {query}") + else: + current_app.logger.error("SF_ERR Salesforce session is None") + except Exception as ex: + current_app.logger.error(f"SF_ERR Salesforce query {query} failed: {ex}") + return result + + +def query_param_sanitize(param: str) -> str: + """Escape single quotes from parameters that will be used in + SOQL queries since these can cause injection attacks. + + Args: + param (str): Parameter to sanitize + + Returns: + str: Parameter with single quotes escaped + """ + return param.replace("'", r"\'") + + +def parse_result(result: int | dict[str, Any], op: str) -> bool: + """Parse a Salesforce API result object and log the result + + Args: + result (int | dict[str, any]): Salesforce API result + op (str): The operation we're logging + + Returns: + bool: Did the operation work? + """ + is_success = 200 <= result <= 299 if isinstance(result, int) else result.get("success", False) + if is_success: + current_app.logger.info(f"{op} succeeded") + else: + current_app.logger.error(f"SF_ERR {op} failed: {result}") + return is_success diff --git a/app/clients/sms/__init__.py b/app/clients/sms/__init__.py index 8d6472d19d..88ab822075 100644 --- a/app/clients/sms/__init__.py +++ b/app/clients/sms/__init__.py @@ -1,6 +1,13 @@ +from enum import Enum + from app.clients import Client, ClientException +class SmsSendingVehicles(Enum): + SHORT_CODE = "short_code" + LONG_CODE = "long_code" + + class SmsClientResponseException(ClientException): """ Base Exception for SmsClientsResponses diff --git a/app/clients/sms/aws_pinpoint.py b/app/clients/sms/aws_pinpoint.py new file mode 100644 index 0000000000..57c58c9f13 --- /dev/null +++ b/app/clients/sms/aws_pinpoint.py @@ -0,0 +1,88 @@ +from time import monotonic + +import boto3 +import phonenumbers + +from app.clients.sms import SmsClient, SmsSendingVehicles + + +class AwsPinpointClient(SmsClient): + """ + AWS Pinpoint SMS client + """ + + def init_app(self, current_app, statsd_client, *args, **kwargs): + self._client = boto3.client("pinpoint-sms-voice-v2", region_name="ca-central-1") + super(AwsPinpointClient, self).__init__(*args, **kwargs) + self.current_app = current_app + self.name = "pinpoint" + self.statsd_client = statsd_client + + def get_name(self): + return self.name + + def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None, service_id=None, sending_vehicle=None): + messageType = "TRANSACTIONAL" + matched = False + opted_out = False + response = {} + + if self.current_app.config["FF_TEMPLATE_CATEGORY"]: + use_shortcode_pool = ( + sending_vehicle == SmsSendingVehicles.SHORT_CODE + or str(service_id) == self.current_app.config["NOTIFY_SERVICE_ID"] + ) + else: + use_shortcode_pool = ( + str(template_id) in self.current_app.config["AWS_PINPOINT_SC_TEMPLATE_IDS"] + or str(service_id) == self.current_app.config["NOTIFY_SERVICE_ID"] + ) + if use_shortcode_pool: + pool_id = self.current_app.config["AWS_PINPOINT_SC_POOL_ID"] + else: + pool_id = self.current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"] + + for match in phonenumbers.PhoneNumberMatcher(to, "US"): + matched = True + opted_out = False + to = 
phonenumbers.format_number(match.number, phonenumbers.PhoneNumberFormat.E164) + destinationNumber = to + try: + start_time = monotonic() + # For international numbers we send with an AWS number for the corresponding country, using our default sender id. + # Note that Canada does not currently support sender ids. + if phonenumbers.region_code_for_number(match.number) != "CA": + response = self._client.send_text_message( + DestinationPhoneNumber=destinationNumber, + MessageBody=content, + MessageType=messageType, + ConfigurationSetName=self.current_app.config["AWS_PINPOINT_CONFIGURATION_SET_NAME"], + ) + else: + response = self._client.send_text_message( + DestinationPhoneNumber=destinationNumber, + OriginationIdentity=pool_id, + MessageBody=content, + MessageType=messageType, + ConfigurationSetName=self.current_app.config["AWS_PINPOINT_CONFIGURATION_SET_NAME"], + ) + except self._client.exceptions.ConflictException as e: + if e.response.get("Reason") == "DESTINATION_PHONE_NUMBER_OPTED_OUT": + opted_out = True + else: + raise e + + except Exception as e: + self.statsd_client.incr("clients.pinpoint.error") + raise e + finally: + elapsed_time = monotonic() - start_time + self.current_app.logger.info("AWS Pinpoint request finished in {}".format(elapsed_time)) + self.statsd_client.timing("clients.pinpoint.request-time", elapsed_time) + self.statsd_client.incr("clients.pinpoint.success") + return "opted_out" if opted_out else response.get("MessageId") + + if not matched: + self.statsd_client.incr("clients.pinpoint.error") + self.current_app.logger.error("No valid numbers found in {}".format(to)) + raise ValueError("No valid numbers found for SMS delivery") diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index ed9eb2738e..d949de4c6d 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -2,8 +2,8 @@ from time import monotonic import boto3 -import botocore import phonenumbers +from notifications_utils.statsd_decorators import statsd from app.clients.sms import SmsClient @@ -25,7 +25,8 @@ def init_app(self, current_app, statsd_client, *args, **kwargs): def get_name(self): return self.name - def send_sms(self, to, content, reference, multi=True, sender=None): + @statsd(namespace="clients.sns") + def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None, service_id=None, sending_vehicle=None): matched = False for match in phonenumbers.PhoneNumberMatcher(to, "US"): @@ -64,12 +65,9 @@ def send_sms(self, to, content, reference, multi=True, sender=None): try: start_time = monotonic() response = client.publish(PhoneNumber=to, Message=content, MessageAttributes=attributes) - except botocore.exceptions.ClientError as e: - self.statsd_client.incr("clients.sns.error") - raise str(e) except Exception as e: self.statsd_client.incr("clients.sns.error") - raise str(e) + raise e finally: elapsed_time = monotonic() - start_time self.current_app.logger.info("AWS SNS request finished in {}".format(elapsed_time)) diff --git a/app/clients/zendesk.py b/app/clients/zendesk.py deleted file mode 100644 index e39f4402df..0000000000 --- a/app/clients/zendesk.py +++ /dev/null @@ -1,82 +0,0 @@ -from typing import Dict, List, Union -from urllib.parse import urljoin - -import requests -from flask import current_app -from requests.auth import HTTPBasicAuth - -from app.user.contact_request import ContactRequest - -__all__ = ["Zendesk"] - - -class Zendesk(object): - def __init__(self, contact: ContactRequest): - self.api_url = current_app.config["ZENDESK_API_URL"] 
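As a reading aid for the new Pinpoint client above: a minimal sketch of how a delivery task might call send_sms() and interpret its return value, which is either the string "opted_out" or the provider MessageId. The aws_pinpoint_client import and the deliver_sms_sketch/notification names are assumptions for illustration only, not part of this change.

    # Sketch only -- assumes the client is instantiated on the app package as aws_pinpoint_client.
    from app import aws_pinpoint_client

    def deliver_sms_sketch(notification):
        reference = aws_pinpoint_client.send_sms(
            to=notification.to,
            content=notification.content,
            reference=str(notification.id),
            template_id=notification.template_id,
            service_id=notification.service_id,
            sending_vehicle=None,  # with FF_TEMPLATE_CATEGORY off, the AWS_PINPOINT_SC_TEMPLATE_IDS check applies
        )
        if reference == "opted_out":
            # the destination number opted out at the carrier level; do not retry
            return "permanent-failure"
        return reference  # provider MessageId, kept for receipt matching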
-        self.token = current_app.config["ZENDESK_API_KEY"]
-        self.contact = contact
-
-    def _generate_description(self):
-        message = self.contact.message
-        if self.contact.is_go_live_request():
-            message = "\n".join(
-                [
-                    f"{self.contact.service_name} just requested to go live.",
-                    "",
-                    f"- Department/org: {self.contact.department_org_name}",
-                    f"- Intended recipients: {self.contact.intended_recipients}",
-                    f"- Purpose: {self.contact.main_use_case}",
-                    f"- Notification types: {self.contact.notification_types}",
-                    f"- Expected monthly volume: {self.contact.expected_volume}",
-                    "---",
-                    self.contact.service_url,
-                ]
-            )
-        elif self.contact.is_branding_request():
-            message = "\n".join(
-                [
-                    f"A new logo has been uploaded by {self.contact.name} ({self.contact.email_address}) for the following service:",
-                    f"- Service id: {self.contact.service_id}",
-                    f"- Service name: {self.contact.service_name}",
-                    f"- Logo filename: {self.contact.branding_url}",
-                    "\n",
-                    f"Un nouveau logo a été téléchargé par {self.contact.name} ({self.contact.email_address}) pour le service suivant :",
-                    f"- Identifiant du service : {self.contact.service_id}",
-                    f"- Nom du service : {self.contact.service_name}",
-                    f"- Nom du fichier du logo : {self.contact.branding_url}",
-                ]
-            )
-
-        if len(self.contact.user_profile):
-            message += f"\n\n---\n\n{self.contact.user_profile}"
-
-        return message
-
-    # Update for Zendesk API Ticket format
-    # read docs: https://developer.zendesk.com/rest_api/docs/core/tickets#create-ticket
-    def _generate_ticket(self) -> Dict[str, Dict[str, Union[str, int, List[str]]]]:
-        return {
-            "ticket": {
-                "subject": self.contact.friendly_support_type,
-                "description": self._generate_description(),
-                "email": self.contact.email_address,
-                "tags": self.contact.tags
-                + ["notification_api"],  # Custom tag used to auto-assign ticket to the notification support group
-            }
-        }
-
-    def send_ticket(self):
-        if not self.api_url or not self.token:
-            raise NotImplementedError
-
-        # The API and field definitions are defined here:
-        # https://developer.zendesk.com/rest_api/docs/support/tickets
-        response = requests.post(
-            urljoin(self.api_url, "/api/v2/tickets"),
-            json=self._generate_ticket(),
-            auth=HTTPBasicAuth(f"{self.contact.email_address}/token", self.token),
-            timeout=5,
-        )
-
-        if response.status_code != 201:
-            raise requests.HTTPError(response.status_code, "Failed to create zendesk ticket")
diff --git a/app/clients/zendesk_sell.py b/app/clients/zendesk_sell.py
deleted file mode 100644
index d5451ba0f7..0000000000
--- a/app/clients/zendesk_sell.py
+++ /dev/null
@@ -1,396 +0,0 @@
-import json
-from enum import Enum
-from typing import Dict, List, Optional, Tuple, Union
-from urllib.parse import urljoin
-
-import requests
-from flask import current_app
-
-from app.authentication.bearer_auth import BearerAuth
-from app.models import Service, User
-from app.user.contact_request import ContactRequest
-
-__all__ = ["ZenDeskSell"]
-
-
-class ZenDeskSell(object):
-
-    # FIXME: consider making this an environment variable
-    OWNER_ID = 2693899
-
-    STATUS_CREATE_TRIAL = 11826762
-    STATUS_CLOSE_LIVE = 11826764
-
-    class NoteResourceType(Enum):
-        LEAD = "lead"
-        CONTACT = "contact"
-        DEAL = "deal"
-
-    def __init__(self):
-        self.api_url = current_app.config["ZENDESK_SELL_API_URL"]
-        self.token = current_app.config["ZENDESK_SELL_API_KEY"]
-
-    @staticmethod
-    def _name_split(name: str) -> Tuple[str, str]:
-        # FIXME: consider migrating to pypi/nameparser for proper name parsing to handle cases like:
-        # 'Dr. Juan Q.
Xavier de la Vega III (Doc Vega)' - name_tokenised = name.split() - return ( - " ".join(name_tokenised[:-1]) if len(name_tokenised) > 1 else "", - name_tokenised[-1], - ) - - @staticmethod - def _generate_lead_data( - contact: ContactRequest, - ) -> Dict[str, Union[str, List[str], Dict]]: - - # validation based upon api mandatory fields - assert len(contact.name) or len(contact.department_org_name), "Name or Org are mandatory field" - - recipients = { - "internal": "Colleagues within your department (internal)", - "external": "Partners from other organizations (external)", - "public": "Public", - } - - first_name, last_name = ZenDeskSell._name_split(contact.name) - return { - "data": { - "last_name": last_name, - "first_name": first_name, - "organization_name": contact.department_org_name, - "owner_id": ZenDeskSell.OWNER_ID, - "email": contact.email_address, - "description": f"Program: {contact.program_service_name}\n{contact.main_use_case}: " - f"{contact.main_use_case_details}", - "tags": [contact.support_type, contact.language], - "status": "New", - "source_id": 2085874, # hard coded value defined by Zendesk for 'Demo request form' - "custom_fields": { - "Product": ["Notify"], - "Intended recipients": recipients[contact.intended_recipients] - if contact.intended_recipients in recipients - else "No value", - }, - } - } - - @staticmethod - def _generate_contact_data(user: User) -> Dict[str, Union[str, List[str], Dict]]: - - # validation based upon api mandatory fields - assert len(user.name) and len(user.email_address), "Name or email are mandatory field" - - first_name, last_name = ZenDeskSell._name_split(user.name) - return { - "data": { - "last_name": last_name, - "first_name": first_name, - "email": user.email_address, - "mobile": user.mobile_number, - "owner_id": ZenDeskSell.OWNER_ID, - "custom_fields": { - "notify_user_id": str(user.id), - }, - } - } - - @staticmethod - def _generate_deal_data(contact_id: str, service: Service, stage_id: int) -> Dict[str, Union[str, List[str], Dict]]: - return { - "data": { - "contact_id": contact_id, - "name": service.name, - "stage_id": stage_id, - "owner_id": ZenDeskSell.OWNER_ID, - "custom_fields": { - "notify_service_id": str(service.id), - }, - } - } - - @staticmethod - def _generate_lead_conversion_data(lead_id: str): - return { - "data": { - "lead_id": lead_id, - "owner_id": ZenDeskSell.OWNER_ID, - "create_deal": False, - } - } - - @staticmethod - def _generate_note_data(resource_type: NoteResourceType, resource_id: str, content: str) -> Dict[str, Dict[str, str]]: - return { - "data": { - "resource_type": resource_type.value, - "resource_id": resource_id, - "content": content, - } - } - - @staticmethod - def _generate_note_content(contact: ContactRequest) -> str: - return "\n".join( - [ - "Live Notes", - f"{contact.service_name} just requested to go live.", - "", - f"- Department/org: {contact.department_org_name}", - f"- Intended recipients: {contact.intended_recipients}", - f"- Purpose: {contact.main_use_case}", - f"- Notification types: {contact.notification_types}", - f"- Expected monthly volume: {contact.expected_volume}", - "---", - contact.service_url, - ] - ) - - def _send_request( - self, method: str, relative_url: str, data: str = None - ) -> Tuple[requests.models.Response, Optional[Exception]]: - - if not self.api_url or not self.token: - raise NotImplementedError - - try: - response = requests.request( - method=method, - url=urljoin(self.api_url, relative_url), - data=data, - headers={ - "Accept": "application/json", - 
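One behavioural difference worth flagging while this class is removed: the old _name_split above and the new salesforce_utils.get_name_parts (added earlier in this diff) split a full name in opposite directions. A doctest-style sketch with an invented name:

    from app.clients.salesforce.salesforce_utils import get_name_parts

    # Old client (removed here): ZenDeskSell._name_split("Ada Lovelace Byron") -> ("Ada Lovelace", "Byron")
    get_name_parts("Ada Lovelace Byron")  # {"first": "Ada", "last": "Lovelace Byron"}
    get_name_parts("Ada")                 # {"first": "", "last": "Ada"} -- Salesforce requires a last name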
"Content-Type": "application/json", - }, - auth=BearerAuth(token=self.token), - timeout=5, - ) - response.raise_for_status() - return response, None - except requests.RequestException as e: - return response, e - - def upsert_lead(self, contact: ContactRequest) -> int: - - # The API and field definitions are defined here: https://developers.getbase.com/docs/rest/reference/leads - - # name is mandatory for zen desk sell API - assert len(contact.name), "Zendesk sell requires a name for its API" - - resp, e = self._send_request( - method="POST", - relative_url=f"/v2/leads/upsert?email={contact.email_address}", - data=json.dumps(ZenDeskSell._generate_lead_data(contact)), - ) - if e: - content = json.loads(resp.content) - current_app.logger.warning(f"Failed to create zendesk sell lead: {content['errors']}") - raise e - - return resp.status_code - - def search_lead_id(self, user: User) -> Optional[str]: - resp, e = self._send_request(method="GET", relative_url=f"/v2/leads?email={user.email_address}") - if e: - current_app.logger.warning("Failed to search for lead") - return None - - try: - # default to the first lead as we try to perform lead upsert - # There SHOULDN'T be any case where there is more than 1 entry - resp_dict = resp.json() - return resp_dict["items"][0]["data"]["id"] - except (json.JSONDecodeError, KeyError, IndexError): - current_app.logger.warning(f"Invalid response: {resp.text}") - return None - - def search_deal_id(self, service: Service) -> Optional[str]: - resp, e = self._send_request( - method="GET", - relative_url=f"/v2/deals?custom_fields[notify_service_id]={str(service.id)}", - ) - if e: - current_app.logger.warning("Failed to search for deal") - return None - - try: - # default to the first deal as we try to perform deal upsert - # There SHOULDN'T be any case where there is more than 1 entry - resp_dict = resp.json() - return resp_dict["items"][0]["data"]["id"] - except (json.JSONDecodeError, KeyError, IndexError): - current_app.logger.warning(f"Invalid response: {resp.text}") - return None - - def convert_lead_to_contact(self, user: User) -> Optional[str]: - - lead_id = self.search_lead_id(user) - if not lead_id: - return None - - # The API and field definitions are defined here: - # https://developers.getbase.com/docs/rest/reference/lead_conversions - - resp, e = self._send_request( - method="POST", - relative_url="/v2/lead_conversions", - data=json.dumps(ZenDeskSell._generate_lead_conversion_data(lead_id)), - ) - if e: - current_app.logger.warning("Failed to create convert a lead to a contact") - return None - - try: - resp_dict = resp.json() - return resp_dict["data"]["individual_id"] - except (json.JSONDecodeError, KeyError): - current_app.logger.warning(f"Invalid response: {resp.text}") - return None - - def upsert_contact(self, user: User, contact_id: Optional[str]) -> Tuple[Optional[str], bool]: - - # The API and field definitions are defined here: https://developers.getbase.com/docs/rest/reference/contacts - data = json.dumps(ZenDeskSell._generate_contact_data(user)) - if contact_id: - # explicit update as '/upsert?contact_id=' is not reliable - resp, e = self._send_request(method="PUT", relative_url=f"/v2/contacts/{contact_id}", data=data) - else: - resp, e = self._send_request( - method="POST", - relative_url=f"/v2/contacts/upsert?custom_fields[notify_user_id]={str(user.id)}", - data=data, - ) - - if e: - current_app.logger.warning("Failed to create zendesk sell contact") - return None, False - - # response validation - try: - resp_data = resp.json() - 
return ( - resp_data["data"]["id"], - resp_data["data"]["created_at"] == resp_data["data"]["updated_at"], - ) - - except (json.JSONDecodeError, KeyError): - current_app.logger.warning(f"Invalid response: {resp.text}") - return None, False - - def delete_contact(self, contact_id: str) -> None: - - # The API and field definitions are defined here: https://developers.getbase.com/docs/rest/reference/contacts - resp, e = self._send_request(method="DELETE", relative_url=f"/v2/contacts/{contact_id}") - if e: - current_app.logger.warning(f"Failed to delete zendesk sell contact: {contact_id}") - - def upsert_deal(self, contact_id: str, service: Service, stage_id: int) -> Optional[str]: - # The API and field definitions are defined here: https://developers.getbase.com/docs/rest/reference/deals - - resp, e = self._send_request( - method="POST", - relative_url=f"/v2/deals/upsert?" f"custom_fields[notify_service_id]={str(service.id)}", - data=json.dumps(ZenDeskSell._generate_deal_data(contact_id, service, stage_id)), - ) - - if e: - current_app.logger.warning("Failed to create zendesk sell deal") - return None - - # response validation - try: - resp_data = resp.json() - return resp_data["data"]["id"] - - except (json.JSONDecodeError, KeyError): - current_app.logger.warning(f"Invalid response: {resp.text}") - return None - - def create_note(self, resource_type: NoteResourceType, resource_id: str, contact: ContactRequest) -> Optional[str]: - - # The API and field definitions are defined here: - # https://developers.getbase.com/docs/rest/reference/notes - resp, e = self._send_request( - method="POST", - relative_url="/v2/notes", - data=json.dumps( - ZenDeskSell._generate_note_data( - resource_type, - resource_id, - ZenDeskSell._generate_note_content(contact), - ) - ), - ) - if e: - current_app.logger.warning(f"Failed to create note for {resource_type}") - return None - - try: - resp_data = resp.json() - return resp_data["data"]["id"] - - except (json.JSONDecodeError, KeyError): - current_app.logger.warning(f"Invalid response: {resp.text}") - return None - - def _common_create_or_go_live( - self, - service: Service, - user: User, - status: int, - contact_id: Optional[str] = None, - ) -> Optional[str]: - # Upsert a contact (create/update). Only when this is successful does the software upsert a deal - # and link the deal to the contact. 
- # If upsert deal fails go back and delete the contact ONLY if it never existed before - contact_id, is_created = self.upsert_contact(user, contact_id) - if not contact_id: - return None - - deal_id = self.upsert_deal(contact_id, service, status) - if not deal_id and is_created: - # best effort here - self.delete_contact(contact_id) - return None - - return deal_id - - def send_go_live_request(self, service: Service, user: User, contact: ContactRequest) -> Optional[str]: - deal_id = self.search_deal_id(service) - if not deal_id: - # if no entry has been created, try to rehydrate the contact and deal from the user and service - deal_id = self.send_create_service(service, user) - - if deal_id: - return self.create_note(ZenDeskSell.NoteResourceType.DEAL, deal_id, contact) - - return None - - def send_go_live_service(self, service: Service, user: User) -> Optional[str]: - return self._common_create_or_go_live(service, user, ZenDeskSell.STATUS_CLOSE_LIVE) - - def send_create_service(self, service: Service, user: User) -> Optional[str]: - try: - contact_id = self.convert_lead_to_contact(user) - if contact_id: - return self._common_create_or_go_live( - service, - user, - ZenDeskSell.STATUS_CREATE_TRIAL, - contact_id=contact_id, - ) - else: - return self._common_create_or_go_live(service, user, ZenDeskSell.STATUS_CREATE_TRIAL) - except Exception as e: - current_app.logger.warning(f"failed to convert a lead into a contact: {e}") - - # still go through with upsert the contact and lead - return self._common_create_or_go_live(service, user, ZenDeskSell.STATUS_CREATE_TRIAL) - - def send_contact_request(self, contact: ContactRequest) -> int: - ret = 200 - if contact.is_demo_request(): - ret = self.upsert_lead(contact) - - return ret diff --git a/app/cloudfoundry_config.py b/app/cloudfoundry_config.py deleted file mode 100644 index f5dcd93d5e..0000000000 --- a/app/cloudfoundry_config.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -Extracts cloudfoundry config from its json and populates the environment variables that we would expect to be populated -on local/aws boxes -""" - -import json -import os - - -def extract_cloudfoundry_config(): - vcap_services = json.loads(os.environ["VCAP_SERVICES"]) - set_config_env_vars(vcap_services) - - -def set_config_env_vars(vcap_services): - # Postgres config - os.environ["SQLALCHEMY_DATABASE_URI"] = vcap_services["postgres"][0]["credentials"]["uri"] - - vcap_application = json.loads(os.environ["VCAP_APPLICATION"]) - os.environ["NOTIFY_ENVIRONMENT"] = vcap_application["space_name"] - os.environ["NOTIFY_LOG_PATH"] = "/home/vcap/logs/app.log" diff --git a/app/commands.py b/app/commands.py index d99b875072..e36fcdbbe4 100644 --- a/app/commands.py +++ b/app/commands.py @@ -14,14 +14,13 @@ from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound -from app import DATETIME_FORMAT, db, encryption +from app import DATETIME_FORMAT, db, signer_delivery_status from app.aws import s3 from app.celery.letters_pdf_tasks import create_letters_pdf from app.celery.nightly_tasks import ( send_total_sent_notifications_to_performance_platform, ) from app.celery.service_callback_tasks import send_delivery_status_to_service -from app.celery.tasks import record_daily_sorted_counts from app.config import QueueNames from app.dao.annual_billing_dao import dao_create_or_update_annual_billing_for_year from app.dao.fact_billing_dao import ( @@ -254,7 +253,6 @@ def backfill_performance_platform_totals(start_date, end_date): print("Sending total messages sent for all days 
between {} and {}".format(start_date, end_date)) for i in range(delta.days + 1): - process_date = start_date + timedelta(days=i) print("Sending total messages sent for {}".format(process_date.isoformat())) @@ -429,8 +427,8 @@ def replay_service_callbacks(file_name, service_id): "service_callback_api_url": callback_api.url, "service_callback_api_bearer_token": callback_api.bearer_token, } - encrypted_status_update = encryption.encrypt(data) - send_delivery_status_to_service.apply_async([str(n.id), encrypted_status_update], queue=QueueNames.CALLBACKS) + signed_status_update = signer_delivery_status.sign(data) + send_delivery_status_to_service.apply_async([str(n.id), signed_status_update], queue=QueueNames.CALLBACKS) print( "Replay service status for service: {}. Sent {} notification status updates to the queue".format( @@ -460,7 +458,6 @@ def setup_commands(application): ) @statsd(namespace="tasks") def migrate_data_to_ft_billing(start_date, end_date): - current_app.logger.info("Billing migration from date {} to {}".format(start_date, end_date)) process_date = start_date @@ -592,7 +589,6 @@ def rebuild_ft_data(process_day, service): ) @statsd(namespace="tasks") def migrate_data_to_ft_notification_status(start_date, end_date): - print("Notification statuses migration from date {} to {}".format(start_date, end_date)) process_date = start_date @@ -841,7 +837,6 @@ def replay_daily_sorted_count_files(file_extension): suffix=file_extension or ".rs.txt", ): print("Create task to record daily sorted counts for file: ", filename) - record_daily_sorted_counts.apply_async([filename], queue=QueueNames.NOTIFY) @notify_command(name="populate-organisations-from-file") @@ -1050,3 +1045,20 @@ def fix_billable_units(): Notification.query.filter(Notification.id == notification.id).update({"billable_units": template.fragment_count}) db.session.commit() print("End fix_billable_units") + + +@notify_command(name="admin") +@click.option("-u", "--user_email", required=True, help="user email address") +@click.option("--on/--off", required=False, default=True, show_default="on", help="set admin on or off") +def toggle_admin(user_email, on): + """ + Set a user to be a platform admin or not + """ + try: + user = User.query.filter(User.email_address == user_email).one() + except NoResultFound: + print(f"User {user_email} not found") + return + user.platform_admin = on + db.session.commit() + print(f"User {user.email_address} is now {'an admin' if user.platform_admin else 'not an admin'}") diff --git a/app/config.py b/app/config.py index e9bb675d3b..fcb6aa4297 100644 --- a/app/config.py +++ b/app/config.py @@ -4,24 +4,50 @@ from typing import Any, List from dotenv import load_dotenv +from environs import Env from fido2.server import Fido2Server from fido2.webauthn import PublicKeyCredentialRpEntity from kombu import Exchange, Queue +from notifications_utils import logging from celery.schedules import crontab +env = Env() +env.read_env() load_dotenv() -if os.getenv("VCAP_SERVICES"): - # on cloudfoundry, config is a json blob in VCAP_SERVICES - unpack it, and populate - # standard environment variables from it - from app.cloudfoundry_config import extract_cloudfoundry_config - extract_cloudfoundry_config() +class Priorities(object): + LOW = "low" + MEDIUM = "medium" + HIGH = "high" + BULK = "bulk" + NORMAL = "normal" + PRIORITY = "priority" + @staticmethod + def to_lmh(priority: str) -> str: + """ + Convert bulk / normal / priority to low / medium / high. Anything else left alone. 
-class QueueNames(object): + Args: + priority (str): priority to convert. + + Returns: + str: low, medium, or high + """ + if priority == Priorities.BULK: + return Priorities.LOW + elif priority == Priorities.NORMAL: + return Priorities.MEDIUM + elif priority == Priorities.PRIORITY: + return Priorities.HIGH + else: + return priority + + +class QueueNames(object): # Periodic tasks executed by Notify. PERIODIC = "periodic-tasks" @@ -33,20 +59,38 @@ class QueueNames(object): # It would get most traffic coming from the API for example. BULK = "bulk-tasks" + NORMAL = "normal-tasks" + # A queue meant for database tasks but it seems to be the default for sending # notifications in some occasion. Need to investigate the purpose of this one # further. DATABASE = "database-tasks" - # Queue for sending all SMS, except long dedicated numbers. - SEND_SMS = "send-sms-tasks" + # database operations for high priority notifications + PRIORITY_DATABASE = "-priority-database-tasks.fifo" + + # database operations for normal priority notifications + NORMAL_DATABASE = "-normal-database-tasks" + + # database operations for bulk notifications + BULK_DATABASE = "-bulk-database-tasks" + + # A queue for the tasks associated with the batch saving + NOTIFY_CACHE = "notifiy-cache-tasks" + + # Queues for sending all SMS, except long dedicated numbers. + SEND_SMS_HIGH = "send-sms-high" + SEND_SMS_MEDIUM = "send-sms-medium" + SEND_SMS_LOW = "send-sms-low" # Primarily used for long dedicated numbers sent from us-west-2 upon which # we have a limit to send per second and hence, needs to be throttled. SEND_THROTTLED_SMS = "send-throttled-sms-tasks" - # The queue to send emails by default. - SEND_EMAIL = "send-email-tasks" + # Queues for sending all emails. + SEND_EMAIL_HIGH = "send-email-high" + SEND_EMAIL_MEDIUM = "send-email-medium" + SEND_EMAIL_LOW = "send-email-low" # The research mode queue for notifications that are tested by users trying # out Notify. @@ -63,6 +107,7 @@ class QueueNames(object): PROCESS_FTP = "process-ftp-tasks" CREATE_LETTERS_PDF = "create-letters-pdf-tasks" CALLBACKS = "service-callbacks" + CALLBACKS_RETRY = "service-callbacks-retry" # Queue for letters, unused by CDS at this time as we don't use these. LETTERS = "letter-tasks" @@ -73,6 +118,24 @@ class QueueNames(object): # Queue for delivery receipts such as emails sent through AWS SES. 
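A short sketch of how the new Priorities.to_lmh mapping drives queue selection together with the DELIVERY_QUEUES table defined just below; the asserts are illustrative only, not part of this change.

    from app.config import Priorities, QueueNames

    assert Priorities.to_lmh(Priorities.BULK) == Priorities.LOW       # "bulk"   -> "low"
    assert Priorities.to_lmh(Priorities.NORMAL) == Priorities.MEDIUM  # "normal" -> "medium"
    assert Priorities.to_lmh("high") == "high"                        # already low/medium/high: unchanged

    # a bulk email lands on the low-priority email queue
    assert QueueNames.DELIVERY_QUEUES["email"][Priorities.to_lmh(Priorities.BULK)] == QueueNames.SEND_EMAIL_LOW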
DELIVERY_RECEIPTS = "delivery-receipts" + DELIVERY_QUEUES = { + "sms": { + Priorities.LOW: SEND_SMS_LOW, + Priorities.MEDIUM: SEND_SMS_MEDIUM, + Priorities.HIGH: SEND_SMS_HIGH, + }, + "email": { + Priorities.LOW: SEND_EMAIL_LOW, + Priorities.MEDIUM: SEND_EMAIL_MEDIUM, + Priorities.HIGH: SEND_EMAIL_HIGH, + }, + "letter": { + Priorities.LOW: BULK, + Priorities.MEDIUM: NORMAL, + Priorities.HIGH: PRIORITY, + }, + } + @staticmethod def all_queues(): return [ @@ -80,13 +143,21 @@ def all_queues(): QueueNames.PERIODIC, QueueNames.BULK, QueueNames.DATABASE, - QueueNames.SEND_SMS, + QueueNames.PRIORITY_DATABASE, + QueueNames.NORMAL_DATABASE, + QueueNames.BULK_DATABASE, + QueueNames.SEND_SMS_HIGH, + QueueNames.SEND_SMS_MEDIUM, + QueueNames.SEND_SMS_LOW, QueueNames.SEND_THROTTLED_SMS, - QueueNames.SEND_EMAIL, + QueueNames.SEND_EMAIL_HIGH, + QueueNames.SEND_EMAIL_MEDIUM, + QueueNames.SEND_EMAIL_LOW, QueueNames.RESEARCH_MODE, QueueNames.REPORTING, QueueNames.JOBS, QueueNames.RETRY, + QueueNames.CALLBACKS_RETRY, QueueNames.NOTIFY, # QueueNames.CREATE_LETTERS_PDF, QueueNames.CALLBACKS, @@ -112,9 +183,12 @@ class Config(object): ADMIN_CLIENT_SECRET = os.getenv("ADMIN_CLIENT_SECRET") # encyption secret/salt - SECRET_KEY = os.getenv("SECRET_KEY") + SECRET_KEY = env.list("SECRET_KEY", []) DANGEROUS_SALT = os.getenv("DANGEROUS_SALT") + # API key prefix + API_KEY_PREFIX = "gcntfy-" + # DB conection string SQLALCHEMY_DATABASE_URI = os.getenv("SQLALCHEMY_DATABASE_URI") SQLALCHEMY_DATABASE_READER_URI = os.getenv("SQLALCHEMY_DATABASE_READER_URI") @@ -134,7 +208,8 @@ class Config(object): # URL of redis instance REDIS_URL = os.getenv("REDIS_URL") - REDIS_ENABLED = os.getenv("REDIS_ENABLED") == "1" + REDIS_PUBLISH_URL = os.getenv("REDIS_PUBLISH_URL", REDIS_URL) + REDIS_ENABLED = env.bool("REDIS_ENABLED", False) EXPIRE_CACHE_TEN_MINUTES = 600 EXPIRE_CACHE_EIGHT_DAYS = 8 * 24 * 60 * 60 @@ -142,16 +217,24 @@ class Config(object): PERFORMANCE_PLATFORM_ENABLED = False PERFORMANCE_PLATFORM_URL = "https://www.performance.service.gov.uk/data/govuk-notify/" - # Zendesk - ZENDESK_API_URL = os.getenv("ZENDESK_API_URL") - ZENDESK_API_KEY = os.getenv("ZENDESK_API_KEY") - ZENDESK_SELL_API_URL = os.getenv("ZENDESK_SELL_API_URL") - ZENDESK_SELL_API_KEY = os.getenv("ZENDESK_SELL_API_KEY") - # Freshdesk FRESH_DESK_PRODUCT_ID = os.getenv("FRESH_DESK_PRODUCT_ID") FRESH_DESK_API_URL = os.getenv("FRESH_DESK_API_URL") FRESH_DESK_API_KEY = os.getenv("FRESH_DESK_API_KEY") + FRESH_DESK_ENABLED = env.bool("FRESH_DESK_ENABLED", True) + + # Salesforce + SALESFORCE_DOMAIN = os.getenv("SALESFORCE_DOMAIN") + SALESFORCE_CLIENT_ID = os.getenv("SALESFORCE_CLIENT_ID", "Notify") + SALESFORCE_ENGAGEMENT_PRODUCT_ID = os.getenv("SALESFORCE_ENGAGEMENT_PRODUCT_ID") + SALESFORCE_ENGAGEMENT_RECORD_TYPE = os.getenv("SALESFORCE_ENGAGEMENT_RECORD_TYPE") + SALESFORCE_ENGAGEMENT_STANDARD_PRICEBOOK_ID = os.getenv("SALESFORCE_ENGAGEMENT_STANDARD_PRICEBOOK_ID") + SALESFORCE_GENERIC_ACCOUNT_ID = os.getenv("SALESFORCE_GENERIC_ACCOUNT_ID") + SALESFORCE_USERNAME = os.getenv("SALESFORCE_USERNAME") + SALESFORCE_PASSWORD = os.getenv("SALESFORCE_PASSWORD") + SALESFORCE_SECURITY_TOKEN = os.getenv("SALESFORCE_SECURITY_TOKEN") + CRM_GITHUB_PERSONAL_ACCESS_TOKEN = os.getenv("CRM_GITHUB_PERSONAL_ACCESS_TOKEN") + CRM_ORG_LIST_URL = os.getenv("CRM_ORG_LIST_URL") # Logging DEBUG = False @@ -161,13 +244,8 @@ class Config(object): CRONITOR_ENABLED = False CRONITOR_KEYS = json.loads(os.getenv("CRONITOR_KEYS", "{}")) - # Antivirus - MLWR_HOST = os.getenv("MLWR_HOST", False) - MLWR_USER 
= os.getenv("MLWR_USER", "") - MLWR_KEY = os.getenv("MLWR_KEY", "") - # PII check - SCAN_FOR_PII = os.getenv("SCAN_FOR_PII", False) + SCAN_FOR_PII = env.bool("SCAN_FOR_PII", False) # Documentation DOCUMENTATION_DOMAIN = os.getenv("DOCUMENTATION_DOMAIN", "documentation.notification.canada.ca") @@ -178,30 +256,41 @@ class Config(object): NOTIFY_ENVIRONMENT = os.getenv("NOTIFY_ENVIRONMENT", "development") ADMIN_CLIENT_USER_NAME = "notify-admin" + ATTACHMENT_NUM_LIMIT = env.int("ATTACHMENT_NUM_LIMIT", 10) # Limit of 10 attachments per notification. + ATTACHMENT_SIZE_LIMIT = env.int( + "ATTACHMENT_SIZE_LIMIT", 1024 * 1024 * 10 + ) # 10 megabytes limit by default per single attachment AWS_REGION = os.getenv("AWS_REGION", "us-east-1") AWS_ROUTE53_ZONE = os.getenv("AWS_ROUTE53_ZONE", "Z2OW036USASMAK") AWS_SES_REGION = os.getenv("AWS_SES_REGION", "us-east-1") AWS_SES_ACCESS_KEY = os.getenv("AWS_SES_ACCESS_KEY") AWS_SES_SECRET_KEY = os.getenv("AWS_SES_SECRET_KEY") AWS_PINPOINT_REGION = os.getenv("AWS_PINPOINT_REGION", "us-west-2") + AWS_PINPOINT_SC_POOL_ID = os.getenv("AWS_PINPOINT_SC_POOL_ID", "") + AWS_PINPOINT_DEFAULT_POOL_ID = os.getenv("AWS_PINPOINT_DEFAULT_POOL_ID", "") + AWS_PINPOINT_CONFIGURATION_SET_NAME = os.getenv("AWS_PINPOINT_CONFIGURATION_SET_NAME", "pinpoint-configuration") + AWS_PINPOINT_SC_TEMPLATE_IDS = env.list("AWS_PINPOINT_SC_TEMPLATE_IDS", []) AWS_US_TOLL_FREE_NUMBER = os.getenv("AWS_US_TOLL_FREE_NUMBER") CSV_UPLOAD_BUCKET_NAME = os.getenv("CSV_UPLOAD_BUCKET_NAME", "notification-alpha-canada-ca-csv-upload") - ASSET_UPLOAD_BUCKET_NAME = os.getenv("ASSET_UPLOAD_BUCKET_NAME", "notification-alpha-canada-ca-asset-upload") ASSET_DOMAIN = os.getenv("ASSET_DOMAIN", "assets.notification.canada.ca") INVITATION_EXPIRATION_DAYS = 2 NOTIFY_APP_NAME = "api" SQLALCHEMY_RECORD_QUERIES = False SQLALCHEMY_TRACK_MODIFICATIONS = False - SQLALCHEMY_POOL_SIZE = int(os.getenv("SQLALCHEMY_POOL_SIZE", 5)) + SQLALCHEMY_POOL_SIZE = env.int("SQLALCHEMY_POOL_SIZE", 5) SQLALCHEMY_POOL_TIMEOUT = 30 SQLALCHEMY_POOL_RECYCLE = 300 - SQLALCHEMY_ECHO = bool(os.getenv("SQLALCHEMY_ECHO", None)) + SQLALCHEMY_ECHO = env.bool("SQLALCHEMY_ECHO", False) PAGE_SIZE = 50 + PERSONALISATION_SIZE_LIMIT = env.int( + "PERSONALISATION_SIZE_LIMIT", 1024 * 50 + ) # 50k bytes limit by default for personalisation data per notification API_PAGE_SIZE = 250 TEST_MESSAGE_FILENAME = "Test message" ONE_OFF_MESSAGE_FILENAME = "Report" MAX_VERIFY_CODE_COUNT = 10 JOBS_MAX_SCHEDULE_HOURS_AHEAD = 96 + FAILED_LOGIN_LIMIT = os.getenv("FAILED_LOGIN_LIMIT", 10) # be careful increasing this size without being sure that we won't see slowness in pysftp MAX_LETTER_PDF_ZIP_FILESIZE = 40 * 1024 * 1024 # 40mb @@ -209,13 +298,16 @@ class Config(object): CHECK_PROXY_HEADER = False + # Notify's notifications templates NOTIFY_SERVICE_ID = "d6aa2c68-a2d9-4437-ab19-3ae8eb202553" NOTIFY_USER_ID = "6af522d0-2915-4e52-83a3-3690455a5fe6" INVITATION_EMAIL_TEMPLATE_ID = "4f46df42-f795-4cc4-83bb-65ca312f49cc" SMS_CODE_TEMPLATE_ID = "36fb0730-6259-4da1-8a80-c8de22ad4246" EMAIL_2FA_TEMPLATE_ID = "299726d2-dba6-42b8-8209-30e1d66ea164" + EMAIL_MAGIC_LINK_TEMPLATE_ID = "6e97fd09-6da0-4cc8-829d-33cf5b818103" NEW_USER_EMAIL_VERIFICATION_TEMPLATE_ID = "ece42649-22a8-4d06-b87f-d52d5d3f0a27" PASSWORD_RESET_TEMPLATE_ID = "474e9242-823b-4f99-813d-ed392e7f1201" + FORCED_PASSWORD_RESET_TEMPLATE_ID = "e9a65a6b-497b-42f2-8f43-1736e43e13b3" ALREADY_REGISTERED_EMAIL_TEMPLATE_ID = "0880fbb1-a0c6-46f0-9a8e-36c986381ceb" CHANGE_EMAIL_CONFIRMATION_TEMPLATE_ID = 
"eb4d9930-87ab-4aef-9bce-786762687884" SERVICE_NOW_LIVE_TEMPLATE_ID = "618185c6-3636-49cd-b7d2-6f6f5eb3bdde" @@ -234,11 +326,29 @@ class Config(object): NEAR_DAILY_LIMIT_TEMPLATE_ID = "5d3e4322-4ee6-457a-a710-c48755f6b643" REACHED_DAILY_LIMIT_TEMPLATE_ID = "fd29f796-fcdc-471b-a0d4-0093880d9173" DAILY_LIMIT_UPDATED_TEMPLATE_ID = "b3c766e6-be32-4edf-b8db-0f04ef404edc" - - # List of allowed service IDs that are allowed to send HTML through their - # templates. + NEAR_DAILY_SMS_LIMIT_TEMPLATE_ID = "a796568f-a89b-468e-b635-8105554301b9" + REACHED_DAILY_SMS_LIMIT_TEMPLATE_ID = "a646e614-c527-4f94-a955-ed7185d577f4" + DAILY_SMS_LIMIT_UPDATED_TEMPLATE_ID = "6ec12dd0-680a-4073-8d58-91d17cc8442f" + CONTACT_FORM_DIRECT_EMAIL_TEMPLATE_ID = "b04beb4a-8408-4280-9a5c-6a046b6f7704" + NEAR_DAILY_EMAIL_LIMIT_TEMPLATE_ID = "9aa60ad7-2d7f-46f0-8cbe-2bac3d4d77d8" + REACHED_DAILY_EMAIL_LIMIT_TEMPLATE_ID = "ee036547-e51b-49f1-862b-10ea982cfceb" + DAILY_EMAIL_LIMIT_UPDATED_TEMPLATE_ID = "97dade64-ea8d-460f-8a34-900b74ee5eb0" + APIKEY_REVOKE_TEMPLATE_ID = "a0a4e7b8-8a6a-4eaa-9f4e-9c3a5b2dbcf3" + HEARTBEAT_TEMPLATE_EMAIL_LOW = "73079cb9-c169-44ea-8cf4-8d397711cc9d" + HEARTBEAT_TEMPLATE_EMAIL_MEDIUM = "c75c4539-3014-4c4c-96b5-94d326758a74" + HEARTBEAT_TEMPLATE_EMAIL_HIGH = "276da251-3103-49f3-9054-cbf6b5d74411" + HEARTBEAT_TEMPLATE_SMS_LOW = "ab3a603b-d602-46ea-8c83-e05cb280b950" + HEARTBEAT_TEMPLATE_SMS_MEDIUM = "a48b54ce-40f6-4e4a-abe8-1e2fa389455b" + HEARTBEAT_TEMPLATE_SMS_HIGH = "4969a9e9-ddfd-476e-8b93-6231e6f1be4a" + DEFAULT_TEMPLATE_CATEGORY_LOW = "0dda24c2-982a-4f44-9749-0e38b2607e89" + DEFAULT_TEMPLATE_CATEGORY_MEDIUM = "f75d6706-21b7-437e-b93a-2c0ab771e28e" + DEFAULT_TEMPLATE_CATEGORY_HIGH = "c4f87d7c-a55b-4c0f-91fe-e56c65bb1871" + + # Allowed service IDs able to send HTML through their templates. 
ALLOW_HTML_SERVICE_IDS: List[str] = [id.strip() for id in os.getenv("ALLOW_HTML_SERVICE_IDS", "").split(",")] + BATCH_INSERTION_CHUNK_SIZE = int(os.getenv("BATCH_INSERTION_CHUNK_SIZE", 500)) + BROKER_URL = "sqs://" BROKER_TRANSPORT_OPTIONS = { "region": AWS_REGION, @@ -247,7 +357,7 @@ class Config(object): "queue_name_prefix": NOTIFICATION_QUEUE_PREFIX, } CELERY_ENABLE_UTC = True - CELERY_TIMEZONE = os.getenv("TIMEZONE", "America/Toronto") + CELERY_TIMEZONE = os.getenv("TIMEZONE", "UTC") CELERY_ACCEPT_CONTENT = ["json"] CELERY_TASK_SERIALIZER = "json" CELERY_IMPORTS = ( @@ -255,6 +365,7 @@ class Config(object): "app.celery.scheduled_tasks", "app.celery.reporting_tasks", "app.celery.nightly_tasks", + "app.celery.process_pinpoint_receipts_tasks", ) CELERYBEAT_SCHEDULE = { # app/celery/scheduled_tasks.py @@ -273,6 +384,11 @@ class Config(object): "schedule": timedelta(minutes=66), "options": {"queue": QueueNames.PERIODIC}, }, + "mark-jobs-complete": { + "task": "mark-jobs-complete", + "schedule": crontab(), + "options": {"queue": QueueNames.PERIODIC}, + }, "check-job-status": { "task": "check-job-status", "schedule": crontab(), @@ -283,96 +399,101 @@ class Config(object): "schedule": crontab(minute="0, 15, 30, 45"), "options": {"queue": QueueNames.PERIODIC}, }, + "in-flight-to-inbox": { + "task": "in-flight-to-inbox", + "schedule": 60, + "options": {"queue": QueueNames.PERIODIC}, + }, + "beat-inbox-sms-normal": { + "task": "beat-inbox-sms-normal", + "schedule": 10, + "options": {"queue": QueueNames.PERIODIC}, + }, + "beat-inbox-sms-bulk": { + "task": "beat-inbox-sms-bulk", + "schedule": 10, + "options": {"queue": QueueNames.PERIODIC}, + }, + "beat-inbox-sms-priority": { + "task": "beat-inbox-sms-priority", + "schedule": 10, + "options": {"queue": QueueNames.PERIODIC}, + }, + "beat-inbox-email-normal": { + "task": "beat-inbox-email-normal", + "schedule": 10, + "options": {"queue": QueueNames.PERIODIC}, + }, + "beat-inbox-email-bulk": { + "task": "beat-inbox-email-bulk", + "schedule": 10, + "options": {"queue": QueueNames.PERIODIC}, + }, + "beat-inbox-email-priority": { + "task": "beat-inbox-email-priority", + "schedule": 10, + "options": {"queue": QueueNames.PERIODIC}, + }, # app/celery/nightly_tasks.py "timeout-sending-notifications": { "task": "timeout-sending-notifications", - "schedule": crontab(hour=0, minute=5), + "schedule": crontab(hour=5, minute=5), # 00:05 EST in UTC "options": {"queue": QueueNames.PERIODIC}, }, "create-nightly-billing": { "task": "create-nightly-billing", - "schedule": crontab(hour=0, minute=15), + "schedule": crontab(hour=5, minute=15), # 00:15 EST in UTC "options": {"queue": QueueNames.REPORTING}, }, "create-nightly-notification-status": { "task": "create-nightly-notification-status", - "schedule": crontab(hour=0, minute=30), # after 'timeout-sending-notifications' + "schedule": crontab(hour=5, minute=30), # 00:30 EST in UTC, after 'timeout-sending-notifications' "options": {"queue": QueueNames.REPORTING}, }, "delete-sms-notifications": { "task": "delete-sms-notifications", - "schedule": crontab(hour=4, minute=15), # after 'create-nightly-notification-status' + "schedule": crontab(hour=9, minute=15), # 4:15 EST in UTC, after 'create-nightly-notification-status' "options": {"queue": QueueNames.PERIODIC}, }, "delete-email-notifications": { "task": "delete-email-notifications", - "schedule": crontab(hour=4, minute=30), # after 'create-nightly-notification-status' + "schedule": crontab(hour=9, minute=30), # 4:30 EST in UTC, after 'create-nightly-notification-status' 
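The schedule comments below pin each nightly job to an EST wall-clock time by shifting the crontab hour into UTC (EST = UTC-5). A small helper, not part of this change, showing the arithmetic; because the offset is fixed at standard time, the jobs run an hour later in local time during daylight saving.

    from celery.schedules import crontab

    def est_crontab(hour: int, minute: int = 0) -> crontab:
        """Build a UTC crontab entry for an EST wall-clock time (UTC-5, DST ignored)."""
        return crontab(hour=(hour + 5) % 24, minute=minute)

    est_crontab(0, 5)   # 00:05 EST -> crontab(hour=5, minute=5), as in 'timeout-sending-notifications'
    est_crontab(4, 30)  # 04:30 EST -> crontab(hour=9, minute=30), as in 'delete-email-notifications'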
"options": {"queue": QueueNames.PERIODIC}, }, "delete-letter-notifications": { "task": "delete-letter-notifications", - "schedule": crontab(hour=4, minute=45), # after 'create-nightly-notification-status' + "schedule": crontab(hour=9, minute=45), # 4:45 EST in UTC, after 'create-nightly-notification-status' "options": {"queue": QueueNames.PERIODIC}, }, "delete-inbound-sms": { "task": "delete-inbound-sms", - "schedule": crontab(hour=1, minute=40), + "schedule": crontab(hour=6, minute=40), # 1:40 EST in UTC "options": {"queue": QueueNames.PERIODIC}, }, "send-daily-performance-platform-stats": { "task": "send-daily-performance-platform-stats", - "schedule": crontab(hour=2, minute=0), + "schedule": crontab(hour=7, minute=0), # 2:00 EST in UTC "options": {"queue": QueueNames.PERIODIC}, }, "remove_transformed_dvla_files": { "task": "remove_transformed_dvla_files", - "schedule": crontab(hour=3, minute=40), + "schedule": crontab(hour=8, minute=40), # 3:40 EST in UTC "options": {"queue": QueueNames.PERIODIC}, }, "remove_sms_email_jobs": { "task": "remove_sms_email_jobs", - "schedule": crontab(hour=4, minute=0), + "schedule": crontab(hour=9, minute=0), # 4:00 EST in UTC "options": {"queue": QueueNames.PERIODIC}, }, - # 'remove_letter_jobs': { - # 'task': 'remove_letter_jobs', - # 'schedule': crontab(hour=4, minute=20), - # since we mark jobs as archived - # 'options': {'queue': QueueNames.PERIODIC}, - # }, - # 'check-templated-letter-state': { - # 'task': 'check-templated-letter-state', - # 'schedule': crontab(day_of_week='mon-fri', hour=9, minute=0), - # 'options': {'queue': QueueNames.PERIODIC} - # }, - # 'check-precompiled-letter-state': { - # 'task': 'check-precompiled-letter-state', - # 'schedule': crontab(day_of_week='mon-fri', hour='9,15', minute=0), - # 'options': {'queue': QueueNames.PERIODIC} - # }, - # 'raise-alert-if-letter-notifications-still-sending': { - # 'task': 'raise-alert-if-letter-notifications-still-sending', - # 'schedule': crontab(hour=16, minute=30), - # 'options': {'queue': QueueNames.PERIODIC} - # }, - # The collate-letter-pdf does assume it is called in an hour that BST does not make a - # difference to the truncate date which translates to the filename to process - # 'collate-letter-pdfs-for-day': { - # 'task': 'collate-letter-pdfs-for-day', - # 'schedule': crontab(hour=17, minute=50), - # 'options': {'queue': QueueNames.PERIODIC} - # }, - # 'raise-alert-if-no-letter-ack-file': { - # 'task': 'raise-alert-if-no-letter-ack-file', - # 'schedule': crontab(hour=23, minute=00), - # 'options': {'queue': QueueNames.PERIODIC} - # }, } CELERY_QUEUES: List[Any] = [] + CELERY_DELIVER_SMS_RATE_LIMIT = os.getenv("CELERY_DELIVER_SMS_RATE_LIMIT", "1/s") + + CONTACT_FORM_EMAIL_ADDRESS = os.getenv("CONTACT_FORM_EMAIL_ADDRESS", "helpdesk@cds-snc.ca") FROM_NUMBER = "development" - STATSD_HOST = os.getenv("STATSD_HOST") + STATSD_HOST = os.getenv("STATSD_HOST") # CloudWatch agent, shared with embedded metrics STATSD_PORT = 8125 STATSD_ENABLED = bool(STATSD_HOST) @@ -386,6 +507,10 @@ class Config(object): SIMULATED_SMS_NUMBERS = ("+16132532222", "+16132532223", "+16132532224") + # Match with scripts/internal_stress_test/internal_stress_test.py + INTERNAL_TEST_NUMBER = "+16135550123" + INTERNAL_TEST_EMAIL_ADDRESS = "internal.test@cds-snc.ca" + DVLA_BUCKETS = { "job": "{}-dvla-file-per-job".format(os.getenv("NOTIFY_ENVIRONMENT", "development")), "notification": "{}-dvla-letter-api-files".format(os.getenv("NOTIFY_ENVIRONMENT", "development")), @@ -421,8 +546,55 @@ class Config(object): 
CSV_MAX_ROWS_BULK_SEND = os.getenv("CSV_MAX_ROWS_BULK_SEND", 100_000) CSV_BULK_REDIRECT_THRESHOLD = os.getenv("CSV_BULK_REDIRECT_THRESHOLD", 200) - # feature flag to toggle persistance of notification in celery instead of the API - FF_NOTIFICATION_CELERY_PERSISTENCE = os.getenv("FF_NOTIFICATION_CELERY_PERSISTENCE", False) + # Endpoint of Cloudwatch agent running as a side car in EKS listening for embedded metrics + CLOUDWATCH_AGENT_EMF_PORT = 25888 + CLOUDWATCH_AGENT_ENDPOINT = os.getenv("CLOUDWATCH_AGENT_ENDPOINT", f"tcp://{STATSD_HOST}:{CLOUDWATCH_AGENT_EMF_PORT}") + + # Bounce Rate parameters + BR_VOLUME_MINIMUM = 1000 + BR_WARNING_PERCENTAGE = 0.05 + BR_CRITICAL_PERCENTAGE = 0.1 + + # Feature flags for bounce rate + # Timestamp in epoch milliseconds to seed the bounce rate. We will seed data for (24, the below config) included. + FF_BOUNCE_RATE_SEED_EPOCH_MS = os.getenv("FF_BOUNCE_RATE_SEED_EPOCH_MS", False) + # Feature flag to enable custom retry policies such as lowering retry period for certain priority lanes. + FF_CELERY_CUSTOM_TASK_PARAMS = env.bool("FF_CELERY_CUSTOM_TASK_PARAMS", True) + FF_CLOUDWATCH_METRICS_ENABLED = env.bool("FF_CLOUDWATCH_METRICS_ENABLED", False) + FF_SALESFORCE_CONTACT = env.bool("FF_SALESFORCE_CONTACT", False) + FF_TEMPLATE_CATEGORY = env.bool("FF_TEMPLATE_CATEGORY", False) + + # SRE Tools auth keys + SRE_USER_NAME = "SRE_CLIENT_USER" + SRE_CLIENT_SECRET = os.getenv("SRE_CLIENT_SECRET") + + @classmethod + def get_sensitive_config(cls) -> list[str]: + "List of config keys that contain sensitive information" + return [ + "ADMIN_CLIENT_SECRET", + "SECRET_KEY", + "DANGEROUS_SALT", + "SQLALCHEMY_DATABASE_URI", + "SQLALCHEMY_DATABASE_READER_URI", + "SQLALCHEMY_BINDS", + "REDIS_URL", + "FRESH_DESK_API_KEY", + "AWS_SES_ACCESS_KEY", + "AWS_SES_SECRET_KEY", + "ROUTE_SECRET_KEY_1", + "ROUTE_SECRET_KEY_2", + "SALESFORCE_PASSWORD", + "SALESFORCE_SECURITY_TOKEN", + "TEMPLATE_PREVIEW_API_KEY", + "DOCUMENT_DOWNLOAD_API_KEY", + "SRE_CLIENT_SECRET", + ] + + @classmethod + def get_safe_config(cls) -> dict[str, Any]: + "Returns a dict of config keys and values with sensitive values masked" + return logging.get_class_attrs(cls, cls.get_sensitive_config()) ###################### @@ -442,8 +614,9 @@ class Development(Config): TRANSIENT_UPLOADED_LETTERS = "development-transient-uploaded-letters" ADMIN_CLIENT_SECRET = os.getenv("ADMIN_CLIENT_SECRET", "dev-notify-secret-key") - SECRET_KEY = os.getenv("SECRET_KEY", "dev-notify-secret-key") + SECRET_KEY = env.list("SECRET_KEY", ["dev-notify-secret-key"]) DANGEROUS_SALT = os.getenv("DANGEROUS_SALT", "dev-notify-salt ") + SRE_CLIENT_SECRET = os.getenv("SRE_CLIENT_SECRET", "dev-notify-secret-key") NOTIFY_ENVIRONMENT = "development" NOTIFICATION_QUEUE_PREFIX = os.getenv("NOTIFICATION_QUEUE_PREFIX", "notification-canada-ca") @@ -451,8 +624,9 @@ class Development(Config): SQLALCHEMY_DATABASE_URI = os.getenv("SQLALCHEMY_DATABASE_URI", "postgresql://postgres@localhost/notification_api") REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0") + REDIS_PUBLISH_URL = os.getenv("REDIS_PUBLISH_URL", REDIS_URL) - ANTIVIRUS_ENABLED = os.getenv("ANTIVIRUS_ENABLED") == "1" + ANTIVIRUS_ENABLED = env.bool("ANTIVIRUS_ENABLED", False) for queue in QueueNames.all_queues(): Config.CELERY_QUEUES.append(Queue(queue, Exchange("default"), routing_key=queue)) @@ -491,9 +665,9 @@ class Test(Development): API_HOST_NAME = "http://localhost:6011" TEMPLATE_PREVIEW_API_HOST = "http://localhost:9999" - - # FEATURE FLAGS - FF_BATCH_INSERTION = 
os.getenv("FF_BATCH_INSERTION", False) + CRM_GITHUB_PERSONAL_ACCESS_TOKEN = "test-token" + CRM_ORG_LIST_URL = "https://test-url.com" + FAILED_LOGIN_LIMIT = 0 class Production(Config): @@ -517,9 +691,19 @@ class Staging(Production): NOTIFY_ENVIRONMENT = "staging" +class Scratch(Production): + NOTIFY_ENVIRONMENT = "scratch" + + +class Dev(Production): + NOTIFY_ENVIRONMENT = "dev" + + configs = { "development": Development, "test": Test, "production": Production, "staging": Staging, + "scratch": Scratch, + "dev": Dev, } diff --git a/app/dao/annual_billing_dao.py b/app/dao/annual_billing_dao.py index 8f90f8d5cf..80c1dff2a6 100644 --- a/app/dao/annual_billing_dao.py +++ b/app/dao/annual_billing_dao.py @@ -39,7 +39,6 @@ def dao_update_annual_billing_for_future_years(service_id, free_sms_fragment_lim def dao_get_free_sms_fragment_limit_for_year(service_id, financial_year_start=None): - if not financial_year_start: financial_year_start = get_current_financial_year_start_year() @@ -47,7 +46,6 @@ def dao_get_free_sms_fragment_limit_for_year(service_id, financial_year_start=No def dao_get_all_free_sms_fragment_limit(service_id): - return ( AnnualBilling.query.filter_by( service_id=service_id, diff --git a/app/dao/api_key_dao.py b/app/dao/api_key_dao.py index a8880a3485..1c9348d018 100644 --- a/app/dao/api_key_dao.py +++ b/app/dao/api_key_dao.py @@ -1,14 +1,56 @@ import uuid from datetime import datetime, timedelta +from flask import current_app +from itsdangerous import BadSignature from sqlalchemy import func, or_ from sqlalchemy.orm import joinedload +from sqlalchemy.orm.exc import NoResultFound -from app import db, encryption +from app import db, signer_api_key from app.dao.dao_utils import transactional, version_class from app.models import ApiKey +@transactional +def resign_api_keys(resign: bool, unsafe: bool = False): + """Resign the _secret column of the api_keys table with (potentially) a new key. + + Args: + resign (bool): whether to resign the api keys + unsafe (bool, optional): resign regardless of whether the unsign step fails with a BadSignature. + Defaults to False. + + Raises: + e: BadSignature if the unsign step fails and unsafe is False. 
+ """ + rows = ApiKey.query.all() # noqa + current_app.logger.info(f"Total of {len(rows)} api keys") + rows_to_update = [] + + for row in rows: + try: + old_signature = row._secret + unsigned_secret = getattr(row, "secret") # unsign the secret + except BadSignature as e: + if unsafe: + unsigned_secret = signer_api_key.verify_unsafe(row._secret) + else: + current_app.logger.error(f"BadSignature for api_key {row.id}, using verify_unsafe instead") + raise e + setattr(row, "secret", unsigned_secret) # resigns the api key secret with (potentially) a new signing secret + if old_signature != row._secret: + rows_to_update.append(row) + if not resign: + row._secret = old_signature # reset the signature to the old value + + if resign: + current_app.logger.info(f"Resigning {len(rows_to_update)} api keys") + db.session.bulk_save_objects(rows) + elif not resign: + current_app.logger.info(f"{len(rows_to_update)} api keys need resigning") + + @transactional @version_class(ApiKey) def save_model_api_key(api_key): @@ -26,8 +68,46 @@ def expire_api_key(service_id, api_key_id): db.session.add(api_key) -def get_api_key_by_secret(secret): - return db.on_reader().query(ApiKey).filter_by(_secret=encryption.encrypt(str(secret))).options(joinedload("service")).one() +@transactional +def update_last_used_api_key(api_key_id, last_used=None) -> None: + api_key = ApiKey.query.filter_by(id=api_key_id).one() + api_key.last_used_timestamp = last_used if last_used else datetime.utcnow() + db.session.add(api_key) + + +@transactional +@version_class(ApiKey) +def update_compromised_api_key_info(service_id, api_key_id, compromised_info): + api_key = ApiKey.query.filter_by(id=api_key_id, service_id=service_id).one() + api_key.compromised_key_info = compromised_info + db.session.add(api_key) + + +def get_api_key_by_secret(secret, service_id=None): + # Check the first part of the secret is the gc prefix + if current_app.config["API_KEY_PREFIX"] != secret[: len(current_app.config["API_KEY_PREFIX"])]: + raise ValueError() + + # Check if the remaining part of the secret is a the valid api key + token = secret[-36:] + signed_with_all_keys = signer_api_key.sign_with_all_keys(str(token)) + for signed_secret in signed_with_all_keys: + try: + api_key = db.on_reader().query(ApiKey).filter_by(_secret=signed_secret).options(joinedload("service")).one() + except NoResultFound: + raise NoResultFound() + + # Check the middle portion of the secret is the valid service id + if api_key and api_key.service_id: + if len(secret) >= 79: + service_id_from_token = str(secret[-73:-37]) + if str(api_key.service_id) != service_id_from_token: + raise ValueError() + else: + raise ValueError() + if api_key: + return api_key + raise NoResultFound() def get_model_api_keys(service_id, id=None): diff --git a/app/dao/dao_utils.py b/app/dao/dao_utils.py index 1ef0fd81b6..ff388dae70 100644 --- a/app/dao/dao_utils.py +++ b/app/dao/dao_utils.py @@ -30,14 +30,12 @@ def __init__(self, model_class, history_class=None, must_write_history=True): def version_class(*version_options): - if len(version_options) == 1 and not isinstance(version_options[0], VersionOptions): version_options = (VersionOptions(version_options[0]),) def versioned(func): @wraps(func) def record_version(*args, **kwargs): - func(*args, **kwargs) session_objects = [] diff --git a/app/dao/date_util.py b/app/dao/date_util.py index 41b5ed6a74..42afbf15bc 100644 --- a/app/dao/date_util.py +++ b/app/dao/date_util.py @@ -69,7 +69,7 @@ def get_current_financial_year_start_year(): def 
get_financial_year_for_datetime(start_date): - if type(start_date) == date: + if type(start_date) is date: start_date = datetime.combine(start_date, time.min) year = int(start_date.strftime("%Y")) @@ -77,3 +77,30 @@ def get_financial_year_for_datetime(start_date): return year - 1 else: return year + + +def get_midnight(datetime: datetime) -> datetime: + return datetime.replace(hour=0, minute=0, second=0, microsecond=0) + + +def tz_aware_utc_now() -> datetime: + """ + Returns a localized, EST/EDT timezone aware, UTC now datetime. + Call dst() on the returned object to determine daylight savings status. + """ + return pytz.utc.localize(datetime.utcnow()) + + +def tz_aware_midnight_n_days_ago(days_ago: int = 1) -> datetime: + """ + Returns an EST/EDT aware UTC midnight date a number of days ago. + """ + est = pytz.timezone("US/Eastern") + return est.localize(tz_aware_utc_now().replace(tzinfo=None) - timedelta(days=days_ago)) + + +def utc_midnight_n_days_ago(number_of_days): + """ + Returns utc midnight a number of days ago. + """ + return get_midnight(datetime.utcnow() - timedelta(days=number_of_days)) diff --git a/app/dao/email_branding_dao.py b/app/dao/email_branding_dao.py index d8738e9200..1ed90ae1e6 100644 --- a/app/dao/email_branding_dao.py +++ b/app/dao/email_branding_dao.py @@ -3,7 +3,9 @@ from app.models import EmailBranding -def dao_get_email_branding_options(): +def dao_get_email_branding_options(filter_by_organisation_id=None): + if filter_by_organisation_id: + return EmailBranding.query.filter_by(organisation_id=filter_by_organisation_id).all() return EmailBranding.query.all() diff --git a/app/dao/fact_billing_dao.py b/app/dao/fact_billing_dao.py index f9980b6fa3..fd9f3710f1 100644 --- a/app/dao/fact_billing_dao.py +++ b/app/dao/fact_billing_dao.py @@ -69,7 +69,6 @@ def fetch_sms_free_allowance_remainder(start_date): def fetch_sms_billing_for_all_services(start_date, end_date): - # ASSUMPTION: AnnualBilling has been populated for year. free_allowance_remainder = fetch_sms_free_allowance_remainder(start_date).subquery() diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index 3d827b4a3f..0da5e1b143 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -1,13 +1,13 @@ from datetime import datetime, time, timedelta from flask import current_app -from notifications_utils.timezones import convert_local_timezone_to_utc from sqlalchemy import Date, case, func from sqlalchemy.dialects.postgresql import insert from sqlalchemy.sql.expression import extract, literal from sqlalchemy.types import DateTime, Integer from app import db +from app.dao.date_util import tz_aware_midnight_n_days_ago, utc_midnight_n_days_ago from app.models import ( EMAIL_TYPE, KEY_TYPE_NORMAL, @@ -32,16 +32,14 @@ User, ) from app.utils import ( - get_local_timezone_midnight, get_local_timezone_midnight_in_utc, get_local_timezone_month_from_utc_column, - midnight_n_days_ago, ) def fetch_notification_status_for_day(process_day, service_id=None): - start_date = convert_local_timezone_to_utc(datetime.combine(process_day, time.min)) - end_date = convert_local_timezone_to_utc(datetime.combine(process_day + timedelta(days=1), time.min)) + start_date = datetime.combine(process_day, time.min) + end_date = datetime.combine(process_day + timedelta(days=1), time.min) # use notification_history if process day is older than 7 days # this is useful if we need to rebuild the ft_billing table for a date older than 7 days ago. 
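The slicing in get_api_key_by_secret (above, in app/dao/api_key_dao.py) implies a fixed key layout. The sketch below reconstructs it with invented UUIDs; it is an assumption drawn from that slicing, not something this diff defines.

    #   gcntfy-<key name>-<service id, 36 chars>-<secret token, 36 chars>
    service_id = "12345678-1234-1234-1234-123456789012"  # 36 chars, invented
    token = "87654321-4321-4321-4321-210987654321"       # 36 chars, invented; only this part is signed and stored
    full_key = f"gcntfy-my-key-name-{service_id}-{token}"

    assert full_key.startswith("gcntfy-")   # API_KEY_PREFIX check
    assert full_key[-36:] == token          # looked up via signer_api_key.sign_with_all_keys
    assert full_key[-73:-37] == service_id  # cross-checked against api_key.service_id
    assert len(full_key) >= 79              # minimum length for the service id check to run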
current_app.logger.info("Fetch ft_notification_status for {} to {}".format(start_date, end_date)) @@ -85,6 +83,7 @@ def query_for_fact_status_data(table, start_date, end_date, notification_type, s table.key_type, table.status, func.count().label("notification_count"), + func.sum(table.billable_units).label("billable_units"), ) .filter( table.created_at >= start_date, @@ -119,6 +118,7 @@ def update_fact_notification_status(data, process_day): key_type=row.key_type, notification_status=row.status, notification_count=row.notification_count, + billable_units=row.billable_units, ) db.session.connection().execute(stmt) db.session.commit() @@ -148,8 +148,8 @@ def fetch_notification_status_for_service_by_month(start_date, end_date, service ) -def fetch_delivered_notification_stats_by_month(): - return ( +def fetch_delivered_notification_stats_by_month(filter_heartbeats=None): + query = ( db.session.query( func.date_trunc("month", FactNotificationStatus.bst_date).cast(db.Text).label("month"), FactNotificationStatus.notification_type, @@ -168,8 +168,12 @@ def fetch_delivered_notification_stats_by_month(): func.date_trunc("month", FactNotificationStatus.bst_date).desc(), FactNotificationStatus.notification_type, ) - .all() ) + if filter_heartbeats: + query = query.filter( + FactNotificationStatus.service_id != current_app.config["NOTIFY_SERVICE_ID"], + ) + return query.all() def fetch_notification_stats_for_trial_services(): @@ -238,16 +242,28 @@ def fetch_notification_status_for_service_for_day(bst_day, service_id): def fetch_notification_status_for_service_for_today_and_7_previous_days(service_id, by_template=False, limit_days=7): - start_date = midnight_n_days_ago(limit_days) - now = datetime.now() + if limit_days == 1: + ft_start_date = utc_midnight_n_days_ago(limit_days - 1) + # For daily stats, service limits reset at 12:00am UTC each night, so we need to fetch the data from 12:00 UTC to now + start = utc_midnight_n_days_ago(0) + end = datetime.utcnow() + else: + ft_start_date = utc_midnight_n_days_ago(limit_days) + + # The nightly task that populates ft_notification_status counts collects notifications from + # 5AM the day before to 5AM of the current day. So we need to match that timeframe when + # we fetch notifications for the current day. 
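For the window arithmetic just below: the new date_util helpers return UTC-anchored midnights, and the 5 AM shift lines today's live query up with the nightly rollup window. A quick illustration, with an example run time:

    from app.dao.date_util import utc_midnight_n_days_ago

    # if datetime.utcnow() were 2023-06-15 14:42 UTC:
    utc_midnight_n_days_ago(0)  # 2023-06-15 00:00 -- start of "today", used when limit_days == 1
    utc_midnight_n_days_ago(7)  # 2023-06-08 00:00 -- ft_notification_status lower bound for the 7-day view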
+ start = (tz_aware_midnight_n_days_ago(1) + timedelta(hours=5)).replace(minute=0, second=0, microsecond=0) + end = (tz_aware_midnight_n_days_ago(0) + timedelta(hours=5)).replace(minute=0, second=0, microsecond=0) + stats_for_7_days = db.session.query( FactNotificationStatus.notification_type.label("notification_type"), FactNotificationStatus.notification_status.label("status"), *([FactNotificationStatus.template_id.label("template_id")] if by_template else []), - FactNotificationStatus.notification_count.label("count"), + *([FactNotificationStatus.notification_count.label("count")]), ).filter( FactNotificationStatus.service_id == service_id, - FactNotificationStatus.bst_date >= start_date, + FactNotificationStatus.bst_date >= ft_start_date, FactNotificationStatus.key_type != KEY_TYPE_TEST, ) @@ -256,10 +272,11 @@ def fetch_notification_status_for_service_for_today_and_7_previous_days(service_ Notification.notification_type.cast(db.Text), Notification.status, *([Notification.template_id] if by_template else []), - func.count().label("count"), + *([func.count().label("count")]), ) .filter( - Notification.created_at >= get_local_timezone_midnight(now), + Notification.created_at >= start, + Notification.created_at <= end, Notification.service_id == service_id, Notification.key_type != KEY_TYPE_TEST, ) @@ -328,18 +345,21 @@ def get_total_notifications_sent_for_api_key(api_key_id): def get_last_send_for_api_key(api_key_id): """ + SELECT last_used_timestamp as last_notification_created + FROM api_keys + WHERE id = 'api_key_id'; + + If last_used_timestamp is null, then check notifications table/ or notification_history. SELECT max(created_at) as last_notification_created FROM notifications WHERE api_key_id = 'api_key_id' GROUP BY api_key_id; """ - - return ( - db.session.query(func.max(Notification.created_at).label("last_notification_created")) - .filter(Notification.api_key_id == api_key_id) - .group_by(Notification.api_key_id) - .all() + # Fetch last_used_timestamp from api_keys table + api_key_table = ( + db.session.query(ApiKey.last_used_timestamp.label("last_notification_created")).filter(ApiKey.id == api_key_id).all() ) + return [] if api_key_table[0][0] is None else api_key_table def get_api_key_ranked_by_notifications_created(n_days_back): diff --git a/app/dao/inbound_sms_dao.py b/app/dao/inbound_sms_dao.py index 3dbbdf2486..d92fcf2f7f 100644 --- a/app/dao/inbound_sms_dao.py +++ b/app/dao/inbound_sms_dao.py @@ -1,14 +1,54 @@ from flask import current_app +from itsdangerous import BadSignature from notifications_utils.statsd_decorators import statsd from sqlalchemy import and_, desc from sqlalchemy.orm import aliased -from app import db +from app import db, signer_inbound_sms from app.dao.dao_utils import transactional from app.models import SMS_TYPE, InboundSms, Service, ServiceDataRetention from app.utils import midnight_n_days_ago +@transactional +def resign_inbound_sms(resign: bool, unsafe: bool = False): + """Resign the _content column of the inbound_sms table with (potentially) a new key. + + Args: + resign (bool): whether to resign the inbound sms + unsafe (bool, optional): resign regardless of whether the unsign step fails with a BadSignature. + Defaults to False. + + Raises: + e: BadSignature if the unsign step fails and unsafe is False. 
+ """ + rows = InboundSms.query.all() # noqa + current_app.logger.info(f"Total of {len(rows)} inbound sms") + rows_to_update = [] + + for row in rows: + try: + old_signature = row._content + unsigned_content = getattr(row, "content") # unsign the content + except BadSignature as e: + if unsafe: + unsigned_content = signer_inbound_sms.verify_unsafe(row._content) + else: + current_app.logger.error(f"BadSignature for inbound_sms {row.id}") + raise e + setattr(row, "content", unsigned_content) # resigns the content with (potentially) a new signing secret + if old_signature != row._content: + rows_to_update.append(row) + if not resign: + row._content = old_signature # reset the signature to the old value + + if resign: + current_app.logger.info(f"Resigning {len(rows_to_update)} inbound sms") + db.session.bulk_save_objects(rows) + elif not resign: + current_app.logger.info(f"{len(rows_to_update)} inbound sms need resigning") + + @transactional def dao_create_inbound_sms(inbound_sms): db.session.add(inbound_sms) diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 3fb0a3dc4e..28a8b1f15d 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -1,5 +1,6 @@ import uuid from datetime import datetime, timedelta +from typing import Iterable from flask import current_app from notifications_utils.letter_timings import ( @@ -15,6 +16,7 @@ from app.models import ( JOB_STATUS_CANCELLED, JOB_STATUS_FINISHED, + JOB_STATUS_IN_PROGRESS, JOB_STATUS_PENDING, JOB_STATUS_SCHEDULED, LETTER_TYPE, @@ -22,6 +24,7 @@ NOTIFICATION_CREATED, Job, Notification, + NotificationHistory, ServiceDataRetention, Template, ) @@ -30,16 +33,22 @@ @statsd(namespace="dao") def dao_get_notification_outcomes_for_job(service_id, job_id): - return ( + notification = ( db.session.query(func.count(Notification.status).label("count"), Notification.status) .filter(Notification.service_id == service_id, Notification.job_id == job_id) .group_by(Notification.status) - .all() + ) + notification_history = ( + db.session.query(func.count(NotificationHistory.status).label("count"), NotificationHistory.status) + .filter(NotificationHistory.service_id == service_id, NotificationHistory.job_id == job_id) + .group_by(NotificationHistory.status) ) + return notification.union(notification_history).all() + def dao_get_job_by_service_id_and_job_id(service_id, job_id): - return Job.query.filter_by(service_id=service_id, id=job_id).one() + return Job.query.filter_by(service_id=service_id, id=job_id).first() def dao_get_jobs_by_service_id(service_id, limit_days=None, page=1, page_size=50, statuses=None): @@ -63,12 +72,22 @@ def dao_get_job_by_id(job_id) -> Job: return Job.query.filter_by(id=job_id).one() -def dao_archive_job(job): - job.archived = True - db.session.add(job) +def dao_archive_jobs(jobs: Iterable[Job]): + """ + Archive the given jobs. + Args: + jobs (Iterable[Job]): The jobs to archive. + """ + for job in jobs: + job.archived = True + db.session.add(job) db.session.commit() +def dao_get_in_progress_jobs(): + return Job.query.filter(Job.job_status == JOB_STATUS_IN_PROGRESS).all() + + def dao_set_scheduled_jobs_to_pending(): """ Sets all past scheduled jobs to pending, and then returns them for further processing. 
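A hedged sketch of how the new resign_inbound_sms helper above might be driven, for example from a one-off maintenance task run inside an application context (the calling context is an assumption; only the function itself is part of this diff). The same dry-run-then-resign pattern applies to resign_notifications and resign_service_callbacks introduced further down.

from app.dao.inbound_sms_dao import resign_inbound_sms

# Dry run: log how many rows carry an old signature without writing anything back.
resign_inbound_sms(resign=False)

# Re-sign for real. Passing unsafe=True would force-resign rows whose current
# signature cannot be verified, instead of raising BadSignature.
resign_inbound_sms(resign=True)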
@@ -117,7 +136,7 @@ def dao_update_job(job): db.session.commit() -def dao_get_jobs_older_than_data_retention(notification_types): +def dao_get_jobs_older_than_data_retention(notification_types, limit=None): flexible_data_retention = ServiceDataRetention.query.filter( ServiceDataRetention.notification_type.in_(notification_types) ).all() @@ -125,8 +144,7 @@ def dao_get_jobs_older_than_data_retention(notification_types): today = datetime.utcnow().date() for f in flexible_data_retention: end_date = today - timedelta(days=f.days_of_retention) - - jobs.extend( + query = ( Job.query.join(Template) .filter( func.coalesce(Job.scheduled_for, Job.created_at) < end_date, @@ -135,13 +153,15 @@ def dao_get_jobs_older_than_data_retention(notification_types): Job.service_id == f.service_id, ) .order_by(desc(Job.created_at)) - .all() ) + if limit: + query = query.limit(limit - len(jobs)) + jobs.extend(query.all()) end_date = today - timedelta(days=7) for notification_type in notification_types: services_with_data_retention = [x.service_id for x in flexible_data_retention if x.notification_type == notification_type] - jobs.extend( + query = ( Job.query.join(Template) .filter( func.coalesce(Job.scheduled_for, Job.created_at) < end_date, @@ -150,8 +170,10 @@ def dao_get_jobs_older_than_data_retention(notification_types): Job.service_id.notin_(services_with_data_retention), ) .order_by(desc(Job.created_at)) - .all() ) + if limit: + query = query.limit(limit - len(jobs)) + jobs.extend(query.all()) return jobs diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 8d5c4d667a..e88df7cbee 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -2,8 +2,8 @@ import string from datetime import datetime, timedelta -from boto.exception import BotoClientError from flask import current_app +from itsdangerous import BadSignature from notifications_utils.international_billing_rates import INTERNATIONAL_BILLING_RATES from notifications_utils.recipients import ( InvalidEmailError, @@ -19,21 +19,21 @@ from sqlalchemy.dialects.postgresql import insert from sqlalchemy.orm import joinedload from sqlalchemy.orm.exc import NoResultFound -from sqlalchemy.sql import functions +from sqlalchemy.sql import functions, literal_column from sqlalchemy.sql.expression import case from werkzeug.datastructures import MultiDict -from app import create_uuid, db -from app.aws.s3 import get_s3_bucket_objects, remove_s3_object +from app import create_uuid, db, signer_personalisation from app.dao.dao_utils import transactional +from app.dao.date_util import utc_midnight_n_days_ago from app.errors import InvalidRequest -from app.letters.utils import LETTERS_PDF_FILE_LOCATION_STRUCTURE from app.models import ( EMAIL_TYPE, KEY_TYPE_TEST, LETTER_TYPE, NOTIFICATION_CREATED, NOTIFICATION_DELIVERED, + NOTIFICATION_HARD_BOUNCE, NOTIFICATION_PENDING, NOTIFICATION_PENDING_VIRUS_CHECK, NOTIFICATION_PERMANENT_FAILURE, @@ -49,11 +49,86 @@ Service, ServiceDataRetention, ) -from app.utils import ( - escape_special_characters, - get_local_timezone_midnight_in_utc, - midnight_n_days_ago, -) +from app.utils import escape_special_characters, get_local_timezone_midnight_in_utc + + +@transactional +def _resign_notifications_chunk(chunk_offset: int, chunk_size: int, resign: bool, unsafe: bool) -> int: + """Resign the _personalisation column of the notifications in a chunk of notifications with (potentially) a new key. 
+
+    Args:
+        chunk_offset (int): start index of the chunk
+        chunk_size (int): size of the chunk
+        resign (bool): resign the personalisation
+        unsafe (bool): ignore bad signatures
+
+    Raises:
+        e: BadSignature if the unsign step fails and unsafe is False.
+
+    Returns:
+        int: number of notifications resigned or needing to be resigned
+    """
+    rows = Notification.query.order_by(Notification.created_at).slice(chunk_offset, chunk_offset + chunk_size).all()
+    current_app.logger.info(f"Processing chunk {chunk_offset} to {chunk_offset + len(rows) - 1}")
+
+    rows_to_update = []
+    for row in rows:
+        old_signature = row._personalisation
+        if old_signature:
+            try:
+                unsigned_personalisation = getattr(row, "personalisation")  # unsign the personalisation
+            except BadSignature as e:
+                if unsafe:
+                    unsigned_personalisation = signer_personalisation.verify_unsafe(row._personalisation)
+                else:
+                    current_app.logger.warning(f"BadSignature for notification {row.id}: {e}")
+                    raise e
+            setattr(
+                row, "personalisation", unsigned_personalisation
+            )  # resigns the personalisation with (potentially) a new signing secret
+            if old_signature != row._personalisation:
+                rows_to_update.append(row)
+            if not resign:
+                row._personalisation = old_signature  # reset the signature to the old value
+
+    if resign and len(rows_to_update) > 0:
+        current_app.logger.info(f"Resigning {len(rows_to_update)} notifications")
+        db.session.bulk_save_objects(rows)
+    elif len(rows_to_update) > 0:
+        current_app.logger.info(f"{len(rows_to_update)} notifications need resigning")
+
+    return len(rows_to_update)
+
+
+def resign_notifications(chunk_size: int, resign: bool, unsafe: bool = False) -> int:
+    """Resign the _personalisation column of the notifications table with (potentially) a new key.
+
+    Args:
+        chunk_size (int): number of rows to update at once.
+        resign (bool): resign the notifications.
+        unsafe (bool, optional): resign regardless of whether the unsign step fails with a BadSignature. Defaults to False.
+
+    Returns:
+        int: number of notifications that were resigned or need to be resigned.
+
+    Raises:
+        e: BadSignature if the unsign step fails and unsafe is False.
+ """ + + total_notifications = Notification.query.count() + current_app.logger.info(f"Total of {total_notifications} notifications") + num_old_signatures = 0 + + for chunk_offset in range(0, total_notifications, chunk_size): + num_old_signatures_in_chunk = _resign_notifications_chunk(chunk_offset, chunk_size, resign, unsafe) + num_old_signatures += num_old_signatures_in_chunk + + if resign: + current_app.logger.info(f"Overall, {num_old_signatures} notifications were resigned") + else: + current_app.logger.info(f"Overall, {num_old_signatures} notifications need resigning") + return num_old_signatures @statsd(namespace="dao") @@ -123,11 +198,16 @@ def country_records_delivery(phone_prefix): return dlr and dlr.lower() == "yes" -def _update_notification_status(notification, status, provider_response=None): +def _update_notification_status(notification, status, provider_response=None, bounce_response=None): status = _decide_permanent_temporary_failure(current_status=notification.status, status=status) notification.status = status if provider_response: notification.provider_response = provider_response + if bounce_response: + notification.feedback_type = bounce_response.get("feedback_type") + notification.feedback_subtype = bounce_response.get("feedback_subtype") + notification.ses_feedback_id = bounce_response.get("ses_feedback_id") + notification.ses_feedback_date = bounce_response.get("ses_feedback_date") dao_update_notification(notification) return notification @@ -196,24 +276,30 @@ def get_notifications_for_job(service_id, job_id, filter_dict=None, page=1, page return query.order_by(asc(Notification.job_row_number)).paginate(page=page, per_page=page_size) +@statsd(namespace="dao") +def get_notification_count_for_job(service_id, job_id): + return Notification.query.filter_by(service_id=service_id, job_id=job_id).count() + + @statsd(namespace="dao") def get_notification_with_personalisation(service_id, notification_id, key_type): filter_dict = {"service_id": service_id, "id": notification_id} if key_type: filter_dict["key_type"] = key_type - return Notification.query.filter_by(**filter_dict).options(joinedload("template")).one() + try: + return Notification.query.filter_by(**filter_dict).options(joinedload("template")).one() + except NoResultFound: + current_app.logger.warning(f"Failed to get notification with filter: {filter_dict}") + return None @statsd(namespace="dao") -def get_notification_by_id(notification_id, service_id=None, _raise=False): +def get_notification_by_id(notification_id, service_id=None, _raise=False) -> Notification: filters = [Notification.id == notification_id] - if service_id: filters.append(Notification.service_id == service_id) - query = db.on_reader().query(Notification).filter(*filters) - return query.one() if _raise else query.first() @@ -243,7 +329,7 @@ def get_notifications_for_service( filters = [Notification.service_id == service_id] if limit_days is not None: - filters.append(Notification.created_at >= midnight_n_days_ago(limit_days)) + filters.append(Notification.created_at >= utc_midnight_n_days_ago(limit_days)) if older_than is not None: older_than_created_at = db.session.query(Notification.created_at).filter(Notification.id == older_than).as_scalar() @@ -302,9 +388,6 @@ def delete_notifications_older_than_retention_by_type(notification_type, qry_lim convert_utc_to_local_timezone(datetime.utcnow()).date() ) - timedelta(days=f.days_of_retention) - if notification_type == LETTER_TYPE: - _delete_letters_from_s3(notification_type, f.service_id, 
days_of_retention, qry_limit) - insert_update_notification_history(notification_type, days_of_retention, f.service_id) current_app.logger.info("Deleting {} notifications for service id: {}".format(notification_type, f.service_id)) @@ -318,9 +401,8 @@ def delete_notifications_older_than_retention_by_type(notification_type, qry_lim services_with_data_retention = [x.service_id for x in flexible_data_retention] service_ids_to_purge = db.session.query(Service.id).filter(Service.id.notin_(services_with_data_retention)).all() - for service_id in service_ids_to_purge: - if notification_type == LETTER_TYPE: - _delete_letters_from_s3(notification_type, service_id, seven_days_ago, qry_limit) + for row in service_ids_to_purge: + service_id = row._mapping["id"] insert_update_notification_history(notification_type, seven_days_ago, service_id) deleted += _delete_notifications(notification_type, seven_days_ago, service_id, qry_limit) @@ -373,7 +455,7 @@ def _delete_for_query(subquery): def insert_update_notification_history(notification_type, date_to_delete_from, service_id): - notifications = db.session.query(*[x.name for x in NotificationHistory.__table__.c]).filter( + notifications = db.session.query(*[literal_column(x.name) for x in NotificationHistory.__table__.c]).filter( Notification.notification_type == notification_type, Notification.service_id == service_id, Notification.created_at < date_to_delete_from, @@ -396,38 +478,6 @@ def insert_update_notification_history(notification_type, date_to_delete_from, s db.session.commit() -def _delete_letters_from_s3(notification_type, service_id, date_to_delete_from, query_limit): - letters_to_delete_from_s3 = ( - db.session.query(Notification) - .filter( - Notification.notification_type == notification_type, - Notification.created_at < date_to_delete_from, - Notification.service_id == service_id, - ) - .limit(query_limit) - .all() - ) - for letter in letters_to_delete_from_s3: - bucket_name = current_app.config["LETTERS_PDF_BUCKET_NAME"] - if letter.sent_at: - sent_at = str(letter.sent_at.date()) - prefix = LETTERS_PDF_FILE_LOCATION_STRUCTURE.format( - folder=sent_at + "/", - reference=letter.reference, - duplex="D", - letter_class="2", - colour="C", - crown="C" if letter.service.crown else "N", - date="", - ).upper()[:-5] - s3_objects = get_s3_bucket_objects(bucket_name=bucket_name, subfolder=prefix) - for s3_object in s3_objects: - try: - remove_s3_object(bucket_name, s3_object["Key"]) - except BotoClientError: - current_app.logger.exception("Could not delete S3 object with filename: {}".format(s3_object["Key"])) - - @statsd(namespace="dao") @transactional def dao_delete_notifications_by_id(notification_id): @@ -658,6 +708,11 @@ def dao_get_total_notifications_sent_per_day_for_performance_platform(start_date ) +@statsd(namespace="dao") +def get_latest_sent_notification_for_job(job_id): + return Notification.query.filter(Notification.job_id == job_id).order_by(Notification.updated_at.desc()).limit(1).first() + + @statsd(namespace="dao") def dao_get_last_notification_added_for_job_id(job_id): last_notification_added = ( @@ -734,12 +789,12 @@ def _duplicate_update_warning(notification, status): def send_method_stats_by_service(start_time, end_time): return ( db.session.query( - Service.id, - Service.name, - Organisation.name, + Service.id.label("service_id"), + Service.name.label("service_name"), + Organisation.name.label("organisation_name"), NotificationHistory.notification_type, case([(NotificationHistory.api_key_id.isnot(None), "api")], 
else_="admin").label("send_method"), - func.count().label("nb_notifications"), + func.count().label("total_notifications"), ) .join(Service, Service.id == NotificationHistory.service_id) .join(Organisation, Organisation.id == Service.organisation_id) @@ -758,3 +813,100 @@ def send_method_stats_by_service(start_time, end_time): ) .all() ) + + +@statsd(namespace="dao") +@transactional +def overall_bounce_rate_for_day(min_emails_sent=1000, default_time=datetime.utcnow()): + """ + This function returns the bounce rate for all services for the last 24 hours. + The bounce rate is calculated by dividing the number of hard bounces by the total number of emails sent. + The bounce rate is returned as a percentage. + + :param min_emails_sent: the minimum number of emails sent to calculate the bounce rate for + :param default_time: the time to calculate the bounce rate for + :return: a list of tuple of the service_id, total number of email, # of hard bounces and the bounce rate + """ + twenty_four_hours_ago = default_time - timedelta(hours=24) + query = ( + db.session.query( + Notification.service_id.label("service_id"), + func.count(Notification.id).label("total_emails"), + func.count().filter(Notification.feedback_type == NOTIFICATION_HARD_BOUNCE).label("hard_bounces"), + ) + .filter(Notification.created_at.between(twenty_four_hours_ago, default_time)) # this value is the `[bounce-rate-window]` + .group_by(Notification.service_id) + .having( + func.count(Notification.id) >= min_emails_sent + ) # -- this value is the `[bounce-rate-warning-notification-volume-minimum]` + .subquery() + ) + data = db.session.query(query, (100 * query.c.hard_bounces / query.c.total_emails).label("bounce_rate")).all() + return data + + +@statsd(namespace="dao") +@transactional +def service_bounce_rate_for_day(service_id, min_emails_sent=1000, default_time=datetime.utcnow()): + """ + This function returns the bounce rate for a single services for the last 24 hours. + The bounce rate is calculated by dividing the number of hard bounces by the total number of emails sent. + The bounce rate is returned as a percentage. 
+ + :param service_id: the service id to calculate the bounce rate for + :param min_emails_sent: the minimum number of emails sent to calculate the bounce rate for + :param default_time: the time to calculate the bounce rate for + :return: a tuple of the total number of emails sent, # of bounced emails and the bounce rate or None if not enough emails + """ + twenty_four_hours_ago = default_time - timedelta(hours=24) + query = ( + db.session.query( + func.count(Notification.id).label("total_emails"), + func.count().filter(Notification.feedback_type == NOTIFICATION_HARD_BOUNCE).label("hard_bounces"), + ) + .filter(Notification.created_at.between(twenty_four_hours_ago, default_time)) # this value is the `[bounce-rate-window]` + .filter(Notification.service_id == service_id) + .having( + func.count(Notification.id) >= min_emails_sent + ) # -- this value is the `[bounce-rate-warning-notification-volume-minimum]` + .subquery() + ) + data = db.session.query(query, (100 * query.c.hard_bounces / query.c.total_emails).label("bounce_rate")).first() + return data + + +@statsd(namespace="dao") +@transactional +def total_notifications_grouped_by_hour(service_id, default_time=datetime.utcnow(), interval: int = 24): + twenty_four_hours_ago = default_time - timedelta(hours=interval) + query = ( + db.session.query( + func.date_trunc("hour", Notification.created_at).label("hour"), + func.count(Notification.id).label("total_notifications"), + ) + .filter(Notification.created_at.between(twenty_four_hours_ago, default_time)) + .filter(Notification.service_id == service_id) + .filter(Notification.notification_type == EMAIL_TYPE) + .group_by(func.date_trunc("hour", Notification.created_at)) + .order_by(func.date_trunc("hour", Notification.created_at)) + ) + return query.all() + + +@statsd(namespace="dao") +@transactional +def total_hard_bounces_grouped_by_hour(service_id, default_time=datetime.utcnow(), interval: int = 24): + twenty_four_hours_ago = default_time - timedelta(hours=interval) + query = ( + db.session.query( + func.date_trunc("hour", Notification.created_at).label("hour"), + func.count(Notification.id).label("total_notifications"), + ) + .filter(Notification.created_at.between(twenty_four_hours_ago, default_time)) + .filter(Notification.service_id == service_id) + .filter(Notification.notification_type == EMAIL_TYPE) + .filter(Notification.feedback_type == NOTIFICATION_HARD_BOUNCE) + .group_by(func.date_trunc("hour", Notification.created_at)) + .order_by(func.date_trunc("hour", Notification.created_at)) + ) + return query.all() diff --git a/app/dao/organisation_dao.py b/app/dao/organisation_dao.py index c8b9bc052a..06ed25958d 100644 --- a/app/dao/organisation_dao.py +++ b/app/dao/organisation_dao.py @@ -2,7 +2,14 @@ from app import db from app.dao.dao_utils import transactional, version_class -from app.models import Domain, InvitedOrganisationUser, Organisation, Service, User +from app.models import ( + Domain, + EmailBranding, + InvitedOrganisationUser, + Organisation, + Service, + User, +) def dao_get_organisations(): @@ -32,18 +39,16 @@ def dao_get_organisation_by_id(organisation_id): def dao_get_organisation_by_email_address(email_address): - email_address = email_address.lower().replace(".gsi.gov.uk", ".gov.uk") for domain in Domain.query.order_by(func.char_length(Domain.domain).desc()).all(): - if email_address.endswith("@{}".format(domain.domain)) or email_address.endswith(".{}".format(domain.domain)): return Organisation.query.filter_by(id=domain.organisation_id).one() return None -def 
dao_get_organisation_by_service_id(service_id): +def dao_get_organisation_by_service_id(service_id) -> Organisation: return Organisation.query.join(Organisation.services).filter_by(id=service_id).first() @@ -54,13 +59,15 @@ def dao_create_organisation(organisation): @transactional def dao_update_organisation(organisation_id, **kwargs): - domains = kwargs.pop("domains", None) num_updated = Organisation.query.filter_by(id=organisation_id).update(kwargs) + if "email_branding_id" in kwargs: + email_brand = EmailBranding.query.filter_by(id=kwargs["email_branding_id"]).one() + org = Organisation.query.get(organisation_id) + org.email_branding = email_brand if isinstance(domains, list): - Domain.query.filter_by(organisation_id=organisation_id).delete() db.session.bulk_save_objects([Domain(domain=domain.lower(), organisation_id=organisation_id) for domain in domains]) diff --git a/app/dao/provider_details_dao.py b/app/dao/provider_details_dao.py index c2c92ffa9d..24a3519bb2 100644 --- a/app/dao/provider_details_dao.py +++ b/app/dao/provider_details_dao.py @@ -76,7 +76,6 @@ def dao_switch_sms_provider_to_provider_with_identifier(identifier): def get_provider_details_by_notification_type(notification_type, supports_international=False): - filters = [ProviderDetails.notification_type == notification_type] if supports_international: diff --git a/app/dao/service_callback_api_dao.py b/app/dao/service_callback_api_dao.py index 75b7161df8..e77a28e841 100644 --- a/app/dao/service_callback_api_dao.py +++ b/app/dao/service_callback_api_dao.py @@ -1,6 +1,9 @@ from datetime import datetime -from app import create_uuid, db +from flask import current_app +from itsdangerous import BadSignature + +from app import create_uuid, db, signer_bearer_token from app.dao.dao_utils import transactional, version_class from app.models import ( COMPLAINT_CALLBACK_TYPE, @@ -9,6 +12,46 @@ ) +@transactional +def resign_service_callbacks(resign: bool, unsafe: bool = False): + """Resign the _bearer_token column of the service_callbacks table with (potentially) a new key. + + Args: + resign (bool): whether to resign the service_callbacks + unsafe (bool, optional): resign regardless of whether the unsign step fails with a BadSignature. + Defaults to False. + + Raises: + e: BadSignature if the unsign step fails and unsafe is False. 
+ """ + rows = ServiceCallbackApi.query.all() # noqa + current_app.logger.info(f"Total of {len(rows)} service callbacks") + rows_to_update = [] + + for row in rows: + if row._bearer_token: + try: + old_signature = row._bearer_token + unsigned_token = getattr(row, "bearer_token") # unsign the token + except BadSignature as e: + if unsafe: + unsigned_token = signer_bearer_token.verify_unsafe(row._bearer_token) + else: + current_app.logger.error(f"BadSignature for service_callback {row.id}") + raise e + setattr(row, "bearer_token", unsigned_token) # resigns the token with (potentially) a new signing secret + if old_signature != row._bearer_token: + rows_to_update.append(row) + if not resign: + row._bearer_token = old_signature # reset the signature to the old value + + if resign: + current_app.logger.info(f"Resigning {len(rows_to_update)} service callbacks") + db.session.bulk_save_objects(rows) + elif not resign: + current_app.logger.info(f"{len(rows_to_update)} service callbacks need resigning") + + @transactional @version_class(ServiceCallbackApi) def save_service_callback_api(service_callback_api): diff --git a/app/dao/service_email_reply_to_dao.py b/app/dao/service_email_reply_to_dao.py index cf09868486..13b850b246 100644 --- a/app/dao/service_email_reply_to_dao.py +++ b/app/dao/service_email_reply_to_dao.py @@ -68,7 +68,11 @@ def archive_reply_to_email_address(service_id, reply_to_id): reply_to_archive = ServiceEmailReplyTo.query.filter_by(id=reply_to_id, service_id=service_id).one() if reply_to_archive.is_default: - raise ArchiveValidationError("You cannot delete a default email reply to address") + non_archived_reply_tos = dao_get_reply_to_by_service_id(service_id) + if len(non_archived_reply_tos) > 1: + # this error should not be displayed in Admin, only if a user manually sends a post request + raise ArchiveValidationError("You cannot delete a default email reply to address if other reply to addresses exist") + reply_to_archive.is_default = False reply_to_archive.archived = True diff --git a/app/dao/service_user_dao.py b/app/dao/service_user_dao.py index 7a6ea9687d..aed9cfe7ef 100644 --- a/app/dao/service_user_dao.py +++ b/app/dao/service_user_dao.py @@ -8,8 +8,11 @@ def dao_get_service_user(user_id, service_id): def dao_get_active_service_users(service_id): - query = ServiceUser.query.join(ServiceUser.user).filter(ServiceUser.service_id == service_id, User.state == "active") - + query = ( + db.session.query(ServiceUser) + .join(User, User.id == ServiceUser.user_id) + .filter(User.state == "active", ServiceUser.service_id == service_id) + ) return query.all() diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 79e4942fb8..281bfa6cf9 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -1,8 +1,8 @@ import json import uuid from datetime import date, datetime, timedelta -from typing import Tuple, Union +import pytz from flask import current_app from notifications_utils.clients.redis import service_cache_key from notifications_utils.statsd_decorators import statsd @@ -12,7 +12,7 @@ from app import db, redis_store from app.dao.dao_utils import VersionOptions, transactional, version_class -from app.dao.date_util import get_current_financial_year +from app.dao.date_util import get_current_financial_year, get_midnight from app.dao.email_branding_dao import dao_get_email_branding_by_name from app.dao.letter_branding_dao import dao_get_letter_branding_by_name from app.dao.organisation_dao import dao_get_organisation_by_email_address @@ -23,6 +23,7 @@ 
CROWN_ORGANISATION_TYPES, EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, + JOB_STATUS_SCHEDULED, KEY_TYPE_TEST, NHS_ORGANISATION_TYPES, NON_CROWN_ORGANISATION_TYPES, @@ -46,6 +47,7 @@ User, VerifyCode, ) +from app.service.utils import add_pt_data_retention, get_organisation_by_id from app.utils import ( email_address_is_nhs, escape_special_characters, @@ -82,7 +84,7 @@ def dao_count_live_services(): ).count() -def dao_fetch_live_services_data(): +def dao_fetch_live_services_data(filter_heartbeats=None): year_start_date, year_end_date = get_current_financial_year() most_recent_annual_billing = ( @@ -174,8 +176,11 @@ def dao_fetch_live_services_data(): AnnualBilling.free_sms_fragment_limit, ) .order_by(asc(Service.go_live_at)) - .all() ) + + if filter_heartbeats: + data = data.filter(Service.id != current_app.config["NOTIFY_SERVICE_ID"]) + data = data.all() results = [] for row in data: existing_service = next((x for x in results if x["service_id"] == row.service_id), None) @@ -189,12 +194,12 @@ def dao_fetch_live_services_data(): return results -def dao_fetch_service_by_id(service_id, only_active=False, use_cache=False) -> Union[Service, Tuple[Service, dict]]: +def dao_fetch_service_by_id(service_id, only_active=False, use_cache=False) -> Service: if use_cache: service_cache = redis_store.get(service_cache_key(service_id)) if service_cache: service_cache_decoded = json.loads(service_cache.decode("utf-8"))["data"] - return Service.from_json(service_cache_decoded), service_cache_decoded + return Service.from_json(service_cache_decoded) query = Service.query.filter_by(id=service_id).options(joinedload("users")) if only_active: @@ -273,6 +278,7 @@ def dao_create_service( user, service_id=None, service_permissions=None, + organisation_id=None, ): # the default property does not appear to work when there is a difference between the sqlalchemy schema and the # db schema (ie: during a migration), so we have to set sms_sender manually here. 
After the GOVUK sms_sender @@ -284,7 +290,10 @@ def dao_create_service( if service_permissions is None: service_permissions = DEFAULT_SERVICE_PERMISSIONS - organisation = dao_get_organisation_by_email_address(user.email_address) + if organisation_id: + organisation = get_organisation_by_id(organisation_id) + else: + organisation = dao_get_organisation_by_email_address(user.email_address) from app.dao.permissions_dao import permission_dao @@ -304,12 +313,13 @@ def dao_create_service( if organisation: service.organisation_id = organisation.id service.organisation_type = organisation.organisation_type - if organisation.email_branding: - service.email_branding = organisation.email_branding if organisation.letter_branding and not service.letter_branding: service.letter_branding = organisation.letter_branding + if organisation.organisation_type == "province_or_territory": + add_pt_data_retention(service.id) + elif service.organisation_type in NHS_ORGANISATION_TYPES or email_address_is_nhs(user.email_address): service.email_branding = dao_get_email_branding_by_name("NHS") service.letter_branding = dao_get_letter_branding_by_name("NHS") @@ -415,20 +425,67 @@ def dao_fetch_todays_stats_for_service(service_id): def fetch_todays_total_message_count(service_id): + midnight = get_midnight(datetime.now(tz=pytz.utc)) + scheduled = ( + db.session.query(func.coalesce(func.sum(Job.notification_count), 0).label("count")).filter( + Job.service_id == service_id, + Job.job_status == JOB_STATUS_SCHEDULED, + Job.scheduled_for >= midnight, + Job.scheduled_for < midnight + timedelta(days=1), + ) + ).first() + result = ( - db.session.query(func.count(Notification.id).label("count")) - .filter( + db.session.query(func.coalesce(func.count(Notification.id), 0).label("count")).filter( Notification.service_id == service_id, Notification.key_type != KEY_TYPE_TEST, - func.date(Notification.created_at) == date.today(), + Notification.created_at >= midnight, ) - .group_by( - Notification.notification_type, - Notification.status, + ).first() + + return result.count + scheduled.count + + +def fetch_todays_total_sms_count(service_id): + midnight = get_midnight(datetime.now(tz=pytz.utc)) + result = ( + db.session.query(func.count(Notification.id).label("total_sms_notifications")) + .filter( + Notification.service_id == service_id, + Notification.key_type != KEY_TYPE_TEST, + Notification.created_at > midnight, + Notification.notification_type == "sms", ) .first() ) - return 0 if result is None else result.count + return 0 if result is None or result.total_sms_notifications is None else result.total_sms_notifications + + +def fetch_service_email_limit(service_id: uuid.UUID) -> int: + return Service.query.get(service_id).message_limit + + +def fetch_todays_total_email_count(service_id: uuid.UUID) -> int: + midnight = get_midnight(datetime.now(tz=pytz.utc)) + scheduled = ( + db.session.query(func.coalesce(func.sum(Job.notification_count), 0).label("total_scheduled_notifications")).filter( + Job.service_id == service_id, + Job.job_status == JOB_STATUS_SCHEDULED, + Job.scheduled_for > midnight, + Job.scheduled_for < midnight + timedelta(hours=23, minutes=59, seconds=59), + ) + ).first() + + result = ( + db.session.query(func.coalesce(func.count(Notification.id), 0).label("total_email_notifications")).filter( + Notification.service_id == service_id, + Notification.key_type != KEY_TYPE_TEST, + Notification.created_at > midnight, + Notification.notification_type == "email", + ) + ).first() + + return result.total_email_notifications + 
scheduled.total_scheduled_notifications def _stats_for_service_query(service_id): @@ -436,7 +493,7 @@ def _stats_for_service_query(service_id): db.session.query( Notification.notification_type, Notification.status, - func.count(Notification.id).label("count"), + *([func.count(Notification.id).label("count")]), ) .filter( Notification.service_id == service_id, diff --git a/app/dao/template_categories_dao.py b/app/dao/template_categories_dao.py new file mode 100644 index 0000000000..ea584df8ba --- /dev/null +++ b/app/dao/template_categories_dao.py @@ -0,0 +1,89 @@ +import uuid +from datetime import datetime + +from flask import current_app + +from app import db +from app.dao.dao_utils import transactional +from app.errors import InvalidRequest +from app.models import Template, TemplateCategory + + +@transactional +def dao_create_template_category(template_category: TemplateCategory): + if template_category.id is None: + template_category.id = uuid.uuid4() + db.session.add(template_category) + + +def dao_get_template_category_by_id(template_category_id) -> TemplateCategory: + return TemplateCategory.query.filter_by(id=template_category_id).one() + + +def dao_get_template_category_by_template_id(template_id) -> TemplateCategory: + return Template.query.filter_by(id=template_id).one().template_category + + +# TODO: Add filters: Select all template categories used by at least 1 sms/email template +def dao_get_all_template_categories(template_type=None, hidden=None): + query = TemplateCategory.query + + if template_type is not None: + query = query.join(Template).filter(Template.template_type == template_type) + + if hidden is not None: + query = query.filter(TemplateCategory.hidden == hidden) + + return query.all() + + +@transactional +def dao_update_template_category(template_category: TemplateCategory): + db.session.add(template_category) + db.session.commit() + + +@transactional +def dao_delete_template_category_by_id(template_category_id, cascade=False): + """ + Deletes a `TemplateCategory`. By default, if the `TemplateCategory` is associated with any `Template`, it will not be deleted. + If the `cascade` option is specified then the category will be forcible removed: + 1. The `Category` will be dissociated from templates that use it + 2. The `Template` is assigned to one of the default categories that matches the priority of the deleted category + 3. Finally the `Category` will be deleted + + Args: + template_category_id (str): The id of the template_category to delete + cascade (bool, optional): Specify whether to dissociate the category from templates that use it to force removal. Defaults to False. + """ + template_category = dao_get_template_category_by_id(template_category_id) + templates = Template.query.filter_by(template_category_id=template_category_id).all() + + if templates and not cascade: + raise InvalidRequest( + "Cannot delete categories associated with templates. 
Dissociate the category from templates first.", 400 + ) + + if templates and cascade: + # When there are templates and we are cascading, we set the category to a default + # that matches the template's previous category's priority + for template in templates: + # Get the a default category that matches the previous priority of the template, based on template type + default_category_id = _get_default_category_id( + template_category.sms_process_type if template.template_type == "sms" else template_category.email_process_type + ) + template.template_category_id = default_category_id + template.updated_at = datetime.utcnow() + db.session.add(template) + db.session.commit() + + db.session.delete(template_category) + + +def _get_default_category_id(process_type): + default_categories = { + "bulk": current_app.config["DEFAULT_TEMPLATE_CATEGORY_LOW"], + "normal": current_app.config["DEFAULT_TEMPLATE_CATEGORY_MEDIUM"], + "priority": current_app.config["DEFAULT_TEMPLATE_CATEGORY_HIGH"], + } + return default_categories.get(process_type, current_app.config["DEFAULT_TEMPLATE_CATEGORY_LOW"]) diff --git a/app/dao/templates_dao.py b/app/dao/templates_dao.py index 112c7fcbe5..8f06080986 100644 --- a/app/dao/templates_dao.py +++ b/app/dao/templates_dao.py @@ -1,7 +1,7 @@ import json import uuid from datetime import datetime -from typing import Tuple, Union +from typing import Union from flask import current_app from notifications_utils.clients.redis import template_version_cache_key @@ -21,12 +21,13 @@ @transactional @version_class(VersionOptions(Template, history_class=TemplateHistory)) -def dao_create_template(template): - template.id = uuid.uuid4() # must be set now so version history model can use same id +def dao_create_template(template, redact_personalisation=False): + # must be set now so version history model can use same id + template.id = uuid.uuid4() redacted_dict = { "template": template, - "redact_personalisation": False, + "redact_personalisation": redact_personalisation, } if template.created_by: redacted_dict.update({"updated_by": template.created_by}) @@ -41,9 +42,6 @@ def dao_create_template(template): @transactional @version_class(VersionOptions(Template, history_class=TemplateHistory)) def dao_update_template(template): - if template.archived: - template.folder = None - db.session.add(template) @@ -80,6 +78,71 @@ def dao_update_template_reply_to(template_id, reply_to): return template +@transactional +def dao_update_template_process_type(template_id, process_type): + Template.query.filter_by(id=template_id).update( + { + "process_type": process_type, + } + ) + template = Template.query.filter_by(id=template_id).one() + + history = TemplateHistory( + **{ + "id": template.id, + "name": template.name, + "template_type": template.template_type, + "created_at": template.created_at, + "updated_at": template.updated_at, + "content": template.content, + "service_id": template.service_id, + "subject": template.subject, + "postage": template.postage, + "created_by_id": template.created_by_id, + "version": template.version, + "archived": template.archived, + "process_type": template.process_type, + "service_letter_contact_id": template.service_letter_contact_id, + } + ) + db.session.add(history) + return template + + +@transactional +def dao_update_template_category(template_id, category_id): + Template.query.filter_by(id=template_id).update( + { + "template_category_id": category_id, + "updated_at": datetime.utcnow(), + "version": Template.version + 1, + } + ) + + template = 
Template.query.filter_by(id=template_id).one() + + history = TemplateHistory( + **{ + "id": template.id, + "name": template.name, + "template_type": template.template_type, + "created_at": template.created_at, + "updated_at": template.updated_at, + "content": template.content, + "service_id": template.service_id, + "subject": template.subject, + "postage": template.postage, + "created_by_id": template.created_by_id, + "version": template.version, + "archived": template.archived, + "process_type": template.process_type, + "service_letter_contact_id": template.service_letter_contact_id, + } + ) + db.session.add(history) + return template + + @transactional def dao_redact_template(template, user_id): template.template_redacted.redact_personalisation = True @@ -94,10 +157,7 @@ def dao_get_template_by_id_and_service_id(template_id, service_id, version=None) return db.on_reader().query(Template).filter_by(id=template_id, hidden=False, service_id=service_id).one() -def dao_get_template_by_id( - template_id, version=None, use_cache=False -) -> Union[Union[Template, TemplateHistory], Tuple[Union[Template, TemplateHistory], dict]]: - +def dao_get_template_by_id(template_id, version=None, use_cache=False) -> Union[Template, TemplateHistory]: if use_cache: # When loading a SQLAlchemy object from cache it is in the transient state. # We do not add it to the session. This would defeat the purpose of using the cache. @@ -109,11 +169,10 @@ def dao_get_template_by_id( if template_cache: template_cache_decoded = json.loads(template_cache.decode("utf-8"))["data"] if version: - return TemplateHistory.from_json(template_cache_decoded), template_cache_decoded + return TemplateHistory.from_json(template_cache_decoded) else: - return Template.from_json(template_cache_decoded), template_cache_decoded - - elif version is not None: + return Template.from_json(template_cache_decoded) + if version is not None: return TemplateHistory.query.filter_by(id=template_id, version=version).one() return Template.query.filter_by(id=template_id).one() diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py index 92405df159..456648933b 100644 --- a/app/dao/users_dao.py +++ b/app/dao/users_dao.py @@ -23,7 +23,7 @@ def create_secret_code(): return "".join(map(str, [SystemRandom().randrange(10) for i in range(5)])) -def save_user_attribute(usr, update_dict={}): +def save_user_attribute(usr: User, update_dict={}): if "blocked" in update_dict and update_dict["blocked"]: update_dict.update({"current_session_id": "00000000-0000-0000-0000-000000000000"}) @@ -31,10 +31,11 @@ def save_user_attribute(usr, update_dict={}): db.session.commit() -def save_model_user(usr, update_dict={}, pwd=None): +def save_model_user(usr: User, update_dict={}, pwd=None): if pwd: usr.password = pwd usr.password_changed_at = datetime.utcnow() + if update_dict: _remove_values_for_keys_if_present(update_dict, ["id", "password_changed_at"]) db.session.query(User).filter_by(id=usr.id).update(update_dict) @@ -135,6 +136,7 @@ def update_user_password(user, password): # reset failed login count - they've just reset their password so should be fine user.password = password user.password_changed_at = datetime.utcnow() + user.password_expired = False db.session.add(user) db.session.commit() diff --git a/app/dbsetup.py b/app/dbsetup.py index 52e53282cc..31a1a91d50 100644 --- a/app/dbsetup.py +++ b/app/dbsetup.py @@ -1,11 +1,32 @@ from functools import cached_property, partial -from typing import Optional +from typing import Any, Optional -from flask import _app_ctx_stack # 
type: ignore +import greenlet # type: ignore +import sqlalchemy.types as types from flask_sqlalchemy import BaseQuery, SignallingSession, SQLAlchemy, get_state from sqlalchemy import orm +# adapted from https://r2c.dev/blog/2020/fixing-leaky-logs-how-to-find-a-bug-and-ensure-it-never-returns/ +class SensitiveString(types.TypeDecorator): + """ + String column type for use with SQLAlchemy models whose + content should not appear in logs or exceptions + """ + + impl = types.String + + class Repr(str): + def __repr__(self) -> str: + return "********" + + def process_bind_param(self, value: Optional[str], dialect: Any) -> Optional[Repr]: + return self.Repr(value) if value else None + + def process_result_value(self, value: Optional[Repr], dialect: Any) -> Optional[str]: + return str(value) if value else None + + class ExplicitRoutingSession(SignallingSession): """ This session implementation will route to explicitly named bind. @@ -47,12 +68,13 @@ def binds_setup(self): class RoutingSQLAlchemy(SQLAlchemy): + SensitiveString = SensitiveString + def on_reader(self): return self.session().using_bind("reader") def create_scoped_session(self, options=None): options = options or {} - scopefunc = options.pop("scopefunc", _app_ctx_stack.__ident_func__) options.setdefault("query_cls", BaseQuery) - return orm.scoped_session(partial(ExplicitRoutingSession, self, **options), scopefunc=scopefunc) + return orm.scoped_session(partial(ExplicitRoutingSession, self, **options), scopefunc=greenlet.getcurrent) diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py index 35ae60fa6a..5bcaed1f34 100644 --- a/app/delivery/send_to_providers.py +++ b/app/delivery/send_to_providers.py @@ -1,10 +1,11 @@ +import base64 import os import re -import urllib.request from datetime import datetime -from typing import Dict +from typing import Any, Dict, Optional from uuid import UUID +import phonenumbers from flask import current_app from notifications_utils.recipients import ( validate_and_format_email_address, @@ -15,19 +16,26 @@ PlainTextEmailTemplate, SMSMessageTemplate, ) +from unidecode import unidecode +from urllib3 import PoolManager +from urllib3.util import Retry -from app import clients, statsd_client +from app import bounce_rate_client, clients, document_download_client, statsd_client from app.celery.research_mode_tasks import send_email_response, send_sms_response -from app.clients.mlwr.mlwr import check_mlwr_score +from app.clients.sms import SmsSendingVehicles +from app.config import Config from app.dao.notifications_dao import dao_update_notification from app.dao.provider_details_dao import ( dao_toggle_sms_provider, get_provider_details_by_notification_type, ) +from app.dao.template_categories_dao import dao_get_template_category_by_id from app.dao.templates_dao import dao_get_template_by_id from app.exceptions import ( + DocumentDownloadException, InvalidUrlException, - MalwarePendingException, + MalwareDetectedException, + MalwareScanInProgressException, NotificationTechnicalFailureException, ) from app.models import ( @@ -37,11 +45,15 @@ EMAIL_TYPE, KEY_TYPE_TEST, NOTIFICATION_CONTAINS_PII, + NOTIFICATION_PERMANENT_FAILURE, NOTIFICATION_SENDING, NOTIFICATION_SENT, NOTIFICATION_TECHNICAL_FAILURE, NOTIFICATION_VIRUS_SCAN_FAILED, + PINPOINT_PROVIDER, SMS_TYPE, + SNS_PROVIDER, + BounceRateStatus, Notification, Service, ) @@ -52,15 +64,18 @@ def send_sms_to_provider(notification): service = notification.service if not service.active: - technical_failure(notification=notification) + 
inactive_service_failure(notification=notification) return + # If the notification was not sent already, the status should be created. if notification.status == "created": provider = provider_to_use( SMS_TYPE, notification.id, + notification.to, notification.international, notification.reply_to_text, + template_id=notification.template_id, ) template_dict = dao_get_template_by_id(notification.template_id, notification.template_version).__dict__ @@ -80,13 +95,32 @@ def send_sms_to_provider(notification): notification.reference = send_sms_response(provider.get_name(), notification.to) update_notification_to_sending(notification, provider) + elif ( + validate_and_format_phone_number(notification.to, international=notification.international) + == Config.INTERNAL_TEST_NUMBER + ): + current_app.logger.info(f"notification {notification.id} sending to internal test number. Not sending to AWS") + notification.reference = send_sms_response(provider.get_name(), notification.to) + notification.billable_units = template.fragment_count + update_notification_to_sending(notification, provider) + else: try: + template_category_id = template_dict.get("template_category_id") + if current_app.config["FF_TEMPLATE_CATEGORY"] and template_category_id is not None: + sending_vehicle = SmsSendingVehicles( + dao_get_template_category_by_id(template_category_id).sms_sending_vehicle + ) + else: + sending_vehicle = None reference = provider.send_sms( to=validate_and_format_phone_number(notification.to, international=notification.international), content=str(template), reference=str(notification.id), sender=notification.reply_to_text, + template_id=notification.template_id, + service_id=notification.service_id, + sending_vehicle=sending_vehicle, ) except Exception as e: notification.billable_units = template.fragment_count @@ -96,7 +130,10 @@ def send_sms_to_provider(notification): else: notification.reference = reference notification.billable_units = template.fragment_count - update_notification_to_sending(notification, provider) + if reference == "opted_out": + update_notification_to_opted_out(notification, provider) + else: + update_notification_to_sending(notification, provider) # Record StatsD stats to compute SLOs statsd_client.timing_with_dates("sms.total-time", notification.sent_at, notification.created_at) @@ -125,11 +162,84 @@ def check_file_url(file_info: Dict[str, str], notification_id: UUID): raise InvalidUrlException +def check_for_malware_errors(document_download_response_code, notification): + """ + Check verdict and download calls to the document-download-api will + return error codes if the scan is in progress or if malware was detected. + This function contains the logic for handling these errors. + """ + + # 423 "Locked" response is sent if malicious content was detected + if document_download_response_code == 423: + current_app.logger.info( + f"Malicious content detected! 
Download and attachment failed for notification.id: {notification.id}" + ) + # Update notification that it contains malware + malware_failure(notification=notification) + # 428 "Precondition Required" response is sent if the scan is still in progress + elif document_download_response_code == 428: + current_app.logger.info(f"Malware scan in progress, could not download files for notification.id: {notification.id}") + # Throw error so celery will retry + raise MalwareScanInProgressException + # 408 "Request Timeout" response is sent if the scan does is not complete before it times out + elif document_download_response_code == 408: + current_app.logger.info(f"Malware scan timed out for notification.id: {notification.id}, send anyway") + return + elif document_download_response_code == 200: + return + # unexpected response code + else: + document_download_internal_error(notification=notification) + + +def check_service_over_bounce_rate(service_id: str): + bounce_rate = bounce_rate_client.get_bounce_rate(service_id) + bounce_rate_status = bounce_rate_client.check_bounce_rate_status(service_id) + debug_data = bounce_rate_client.get_debug_data(service_id) + current_app.logger.debug( + f"Service id: {service_id} Bounce Rate: {bounce_rate} Bounce Status: {bounce_rate_status}, Debug Data: {debug_data}" + ) + if bounce_rate_status == BounceRateStatus.CRITICAL.value: + # TODO: Bounce Rate V2, raise a BadRequestError when bounce rate meets or exceeds critical threshold + current_app.logger.warning( + f"Service: {service_id} has met or exceeded a critical bounce rate threshold of 10%. Bounce rate: {bounce_rate}" + ) + elif bounce_rate_status == BounceRateStatus.WARNING.value: + current_app.logger.warning( + f"Service: {service_id} has met or exceeded a warning bounce rate threshold of 5%. Bounce rate: {bounce_rate}" + ) + + +def mime_encoded_word_syntax(encoded_text="", charset="utf-8", encoding="B") -> str: + """MIME encoded-word syntax is a way to encode non-ASCII characters in email headers. + It is described here: + https://docs.aws.amazon.com/ses/latest/dg/send-email-raw.html#send-email-mime-encoding-headers + """ + return f"=?{charset}?{encoding}?{encoded_text}?=" + + +def get_from_address(friendly_from: str, email_from: str, sending_domain: str) -> str: + """ + This function returns the from_address or source in MIME encoded-word syntax + friendly_from is the sender's display name and may contain accents so we need to encode it to base64 + email_from and sending_domain should be ASCII only + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ses/client/send_raw_email.html + "If you want to use Unicode characters in the “friendly from” name, you must encode the “friendly from” + name using MIME encoded-word syntax, as described in Sending raw email using the Amazon SES API." + """ + friendly_from_b64 = base64.b64encode(friendly_from.encode()).decode("utf-8") + friendly_from_mime = mime_encoded_word_syntax(encoded_text=friendly_from_b64, charset="utf-8", encoding="B") + return f'"{friendly_from_mime}" <{unidecode(email_from)}@{unidecode(sending_domain)}>' + + def send_email_to_provider(notification: Notification): + current_app.logger.info(f"Sending email to provider for notification id {notification.id}") service = notification.service if not service.active: - technical_failure(notification=notification) + inactive_service_failure(notification=notification) return + + # If the notification was not sent already, the status should be created. 
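To make the new "friendly from" handling concrete, here is a small sketch of get_from_address as defined above. The service name and sending domain are invented for illustration, and the expected value assumes the default utf-8/Base64 encoded-word parameters shown in mime_encoded_word_syntax.

from app.delivery.send_to_providers import get_from_address

source = get_from_address(
    friendly_from="Service Français",        # display name containing accents
    email_from="service.francais",
    sending_domain="notification.canada.ca",
)
# source == '"=?utf-8?B?U2VydmljZSBGcmFuw6dhaXM=?=" <service.francais@notification.canada.ca>'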
if notification.status == "created": provider = provider_to_use(EMAIL_TYPE, notification.id) @@ -142,45 +252,30 @@ def send_email_to_provider(notification: Notification): for key in file_keys: check_file_url(personalisation_data[key]["document"], notification.id) sending_method = personalisation_data[key]["document"].get("sending_method") - # Check if a MLWR sid exists - if ( - current_app.config["MLWR_HOST"] - and "https" in str(current_app.config["MLWR_HOST"]) - and "mlwr_sid" in personalisation_data[key]["document"] - and personalisation_data[key]["document"]["mlwr_sid"] != "false" - ): - - mlwr_result = check_mlwr(personalisation_data[key]["document"]["mlwr_sid"]) - - if "state" in mlwr_result and mlwr_result["state"] == "completed": - # Update notification that it contains malware - if "submission" in mlwr_result and mlwr_result["submission"]["max_score"] >= 500: - malware_failure(notification=notification) - return - else: - # Throw error so celery will retry in sixty seconds - raise MalwarePendingException - + direct_file_url = personalisation_data[key]["document"]["direct_file_url"] + filename = personalisation_data[key]["document"].get("filename") + mime_type = personalisation_data[key]["document"].get("mime_type") + document_id = personalisation_data[key]["document"]["id"] + scan_verdict_response = document_download_client.check_scan_verdict(service.id, document_id, sending_method) + check_for_malware_errors(scan_verdict_response.status_code, notification) + current_app.logger.info(f"scan_verdict for document_id {document_id} is {scan_verdict_response.json()}") if sending_method == "attach": try: - - req = urllib.request.Request(personalisation_data[key]["document"]["direct_file_url"]) - with urllib.request.urlopen(req) as response: - buffer = response.read() - filename = personalisation_data[key]["document"].get("filename") - mime_type = personalisation_data[key]["document"].get("mime_type") - attachments.append( - { - "name": filename, - "data": buffer, - "mime_type": mime_type, - } - ) - except Exception: - current_app.logger.error( - "Could not download and attach {}".format(personalisation_data[key]["document"]["direct_file_url"]) + retries = Retry(total=5) + http = PoolManager(retries=retries) + + response = http.request("GET", url=direct_file_url) + attachments.append( + { + "name": filename, + "data": response.data, + "mime_type": mime_type, + } ) - del personalisation_data[key] + except Exception as e: + current_app.logger.error(f"Could not download and attach {direct_file_url}\nException: {e}") + del personalisation_data[key] + else: personalisation_data[key] = personalisation_data[key]["document"]["url"] @@ -207,17 +302,25 @@ def send_email_to_provider(notification: Notification): if current_app.config["SCAN_FOR_PII"]: contains_pii(notification, str(plain_text_email)) + current_app.logger.info( + f"Trying to update notification id {notification.id} with service research {service.research_mode} or key type {notification.key_type}" + ) if service.research_mode or notification.key_type == KEY_TYPE_TEST: notification.reference = send_email_response(notification.to) update_notification_to_sending(notification, provider) + elif notification.to == Config.INTERNAL_TEST_EMAIL_ADDRESS: + current_app.logger.info(f"notification {notification.id} sending to internal test email address. 
Not sending to AWS") + notification.reference = send_email_response(notification.to) + update_notification_to_sending(notification, provider) else: if service.sending_domain is None or service.sending_domain.strip() == "": sending_domain = current_app.config["NOTIFY_EMAIL_DOMAIN"] else: sending_domain = service.sending_domain - from_address = '"{}" <{}@{}>'.format(service.name, service.email_from, sending_domain) - + from_address = get_from_address( + friendly_from=service.name, email_from=service.email_from, sending_domain=sending_domain + ) email_reply_to = notification.reply_to_text reference = provider.send_email( @@ -229,8 +332,13 @@ def send_email_to_provider(notification: Notification): reply_to_address=validate_and_format_email_address(email_reply_to) if email_reply_to else None, attachments=attachments, ) + check_service_over_bounce_rate(service.id) + bounce_rate_client.set_sliding_notifications(service.id, str(notification.id)) + current_app.logger.info(f"Setting total notifications for service {service.id} in REDIS") + current_app.logger.info(f"Notification id {notification.id} HAS BEEN SENT") notification.reference = reference update_notification_to_sending(notification, provider) + current_app.logger.info(f"Notification id {notification.id} status in sending") # Record StatsD stats to compute SLOs statsd_client.timing_with_dates("email.total-time", notification.sent_at, notification.created_at) @@ -247,10 +355,77 @@ def update_notification_to_sending(notification, provider): dao_update_notification(notification) -def provider_to_use(notification_type, notification_id, international=False, sender=None): - active_providers_in_order = [ - p for p in get_provider_details_by_notification_type(notification_type, international) if p.active - ] +def update_notification_to_opted_out(notification, provider): + notification.sent_at = datetime.utcnow() + notification.sent_by = provider.get_name() + notification.status = NOTIFICATION_PERMANENT_FAILURE + notification.provider_response = "Phone number is opted out" + dao_update_notification(notification) + + +def provider_to_use( + notification_type: str, + notification_id: UUID, + to: Optional[str] = None, + international: bool = False, + sender: Optional[str] = None, + template_id: Optional[UUID] = None, +) -> Any: + """ + Get the provider to use for sending the notification. + SMS that are being sent with a dedicated number or internationally should not use Pinpoint. + + Args: + notification_type (str): SMS or EMAIL. + notification_id (UUID): id of notification. Just used for logging. + to (str, optional): recipient. Defaults to None. + international (bool, optional): Flags whether or not the message recipient is outside Zone 1 (US / Canada / Caribbean). Defaults to False. + sender (str, optional): reply_to_text to use. Defaults to None. + template_id (str, optional): template_id to use. Defaults to None. + + Raises: + Exception: No active providers. + + Returns: + provider: Provider to use to send the notification. 
+ """ + + has_dedicated_number = sender is not None and sender.startswith("+1") + cannot_determine_recipient_country = False + recipient_outside_canada = False + sending_to_us_number = False + if to is not None: + match = next(iter(phonenumbers.PhoneNumberMatcher(to, "US")), None) + if match is None: + cannot_determine_recipient_country = True + elif ( + phonenumbers.region_code_for_number(match.number) == "US" + ): # The US is a special case that needs to send from a US toll free number + sending_to_us_number = True + elif phonenumbers.region_code_for_number(match.number) != "CA": + recipient_outside_canada = True + using_sc_pool_template = template_id is not None and str(template_id) in current_app.config["AWS_PINPOINT_SC_TEMPLATE_IDS"] + zone_1_outside_canada = recipient_outside_canada and not international + do_not_use_pinpoint = ( + has_dedicated_number + or sending_to_us_number + or cannot_determine_recipient_country + or zone_1_outside_canada + or not current_app.config["AWS_PINPOINT_SC_POOL_ID"] + or ((not current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"]) and not using_sc_pool_template) + ) + if do_not_use_pinpoint: + active_providers_in_order = [ + p + for p in get_provider_details_by_notification_type(notification_type, international) + if p.active and p.identifier != PINPOINT_PROVIDER + ] + else: + active_providers_in_order = [ + p + for p in get_provider_details_by_notification_type(notification_type, international) + if p.active and p.identifier != SNS_PROVIDER + ] if not active_providers_in_order: current_app.logger.error("{} {} failed as no active providers".format(notification_type, notification_id)) @@ -266,12 +441,16 @@ def get_html_email_options(service: Service): "fip_banner_english": False, "fip_banner_french": True, "logo_with_background_colour": False, + "alt_text_en": None, + "alt_text_fr": None, } else: return { "fip_banner_english": True, "fip_banner_french": False, "logo_with_background_colour": False, + "alt_text_en": None, + "alt_text_fr": None, } logo_url = get_logo_url(service.email_branding.logo) if service.email_branding.logo else None @@ -284,10 +463,12 @@ def get_html_email_options(service: Service): "brand_logo": logo_url, "brand_text": service.email_branding.text, "brand_name": service.email_branding.name, + "alt_text_en": service.email_branding.alt_text_en, + "alt_text_fr": service.email_branding.alt_text_fr, } -def technical_failure(notification): +def inactive_service_failure(notification): notification.status = NOTIFICATION_TECHNICAL_FAILURE dao_update_notification(notification) raise NotificationTechnicalFailureException( @@ -310,15 +491,18 @@ def empty_message_failure(notification): def malware_failure(notification): notification.status = NOTIFICATION_VIRUS_SCAN_FAILED dao_update_notification(notification) - raise NotificationTechnicalFailureException( + raise MalwareDetectedException( "Send {} for notification id {} to provider is not allowed. 
Notification contains malware".format( notification.notification_type, notification.id ) ) -def check_mlwr(sid): - return check_mlwr_score(sid) +def document_download_internal_error(notification): + notification.status = NOTIFICATION_TECHNICAL_FAILURE + dao_update_notification(notification) + current_app.logger.error(f"Cannot send notification {notification.id}, document-download-api internal error.") + raise DocumentDownloadException def contains_pii(notification, text_content): diff --git a/app/email_branding/email_branding_schema.py b/app/email_branding/email_branding_schema.py index b7070eafba..06366bb8c1 100644 --- a/app/email_branding/email_branding_schema.py +++ b/app/email_branding/email_branding_schema.py @@ -10,8 +10,10 @@ "text": {"type": ["string", "null"]}, "logo": {"type": ["string", "null"]}, "brand_type": {"enum": BRANDING_TYPES}, + "alt_text_en": {"type": "string"}, + "alt_text_fr": {"type": "string"}, }, - "required": ["name"], + "required": ["name", "alt_text_en", "alt_text_fr"], } post_update_email_branding_schema = { @@ -24,6 +26,8 @@ "text": {"type": ["string", "null"]}, "logo": {"type": ["string", "null"]}, "brand_type": {"enum": BRANDING_TYPES}, + "alt_text_en": {"type": "string"}, + "alt_text_fr": {"type": "string"}, }, "required": [], } diff --git a/app/email_branding/rest.py b/app/email_branding/rest.py index 3dc5086148..6ae95745be 100644 --- a/app/email_branding/rest.py +++ b/app/email_branding/rest.py @@ -20,7 +20,10 @@ @email_branding_blueprint.route("", methods=["GET"]) def get_email_branding_options(): - email_branding_options = [o.serialize() for o in dao_get_email_branding_options()] + filter_by_organisation_id = request.args.get("organisation_id", None) + email_branding_options = [ + o.serialize() for o in dao_get_email_branding_options(filter_by_organisation_id=filter_by_organisation_id) + ] return jsonify(email_branding=email_branding_options) diff --git a/app/email_limit_utils.py b/app/email_limit_utils.py new file mode 100644 index 0000000000..0fb8bf3364 --- /dev/null +++ b/app/email_limit_utils.py @@ -0,0 +1,35 @@ +from datetime import timedelta +from uuid import UUID + +from flask import current_app +from notifications_utils.clients.redis import email_daily_count_cache_key +from notifications_utils.decorators import requires_feature + +from app import redis_store +from app.dao.services_dao import fetch_todays_total_email_count + + +def fetch_todays_email_count(service_id: UUID) -> int: + if not current_app.config["REDIS_ENABLED"]: + return fetch_todays_total_email_count(service_id) + + cache_key = email_daily_count_cache_key(service_id) + total_email_count = redis_store.get(cache_key) + if total_email_count is None: + total_email_count = fetch_todays_total_email_count(service_id) + redis_store.set(cache_key, total_email_count, ex=int(timedelta(hours=2).total_seconds())) + return int(total_email_count) + + +@requires_feature("REDIS_ENABLED") +def increment_todays_email_count(service_id: UUID, increment_by: int) -> None: + fetch_todays_email_count(service_id) # to make sure it's set in redis + cache_key = email_daily_count_cache_key(service_id) + redis_store.incrby(cache_key, increment_by) + + +@requires_feature("REDIS_ENABLED") +def decrement_todays_email_count(service_id: UUID, decrement_by: int) -> None: + fetch_todays_email_count(service_id) + cache_key = email_daily_count_cache_key(service_id) + redis_store.decrby(cache_key, decrement_by) diff --git a/app/encryption.py b/app/encryption.py index 4bb8e4b9e5..2622fcd938 100644 --- 
a/app/encryption.py +++ b/app/encryption.py @@ -1,17 +1,96 @@ +from typing import Any, List, NewType, Optional, TypedDict, cast + from flask_bcrypt import check_password_hash, generate_password_hash from itsdangerous import URLSafeSerializer +from typing_extensions import NotRequired # type: ignore + +SignedNotification = NewType("SignedNotification", str) + + +class NotificationDictToSign(TypedDict): + # todo: remove duplicate keys + # todo: remove all NotRequired and decide if key should be there or not + id: NotRequired[str] + template: str # actually template_id + service_id: NotRequired[str] + template_version: int + to: str # recipient + reply_to_text: NotRequired[str] + personalisation: Optional[dict] + simulated: NotRequired[bool] + api_key: str + key_type: str # should be ApiKeyType but I can't import that here + client_reference: Optional[str] + queue: Optional[str] + sender_id: Optional[str] + job: NotRequired[str] # actually job_id + row_number: Optional[Any] # should this be int or str? + + +class CryptoSigner: + def init_app(self, app: Any, secret_key: str | List[str], salt: str) -> None: + """Initialise the CryptoSigner class. + + Args: + app (Any): The Flask app. + secret_key (str | List[str]): The secret key or list of secret keys to use for signing. + salt (str): The salt to use for signing. + """ + self.app = app + self.secret_key = cast(List[str], [secret_key] if type(secret_key) is str else secret_key) + self.serializer = URLSafeSerializer(secret_key) + self.salt = salt + + def sign(self, to_sign: str | NotificationDictToSign) -> str | bytes: + """Sign a string or dict with the class secret key and salt. + + Args: + to_sign (str | NotificationDictToSign): The string or dict to sign. + + Returns: + str | bytes: The signed string or bytes. + """ + return self.serializer.dumps(to_sign, salt=self.salt) + + def sign_with_all_keys(self, to_sign: str | NotificationDictToSign) -> List[str | bytes]: + """Sign a string or dict with all the individual keys in the class secret key list, and the class salt. + + Args: + to_sign (str | NotificationDictToSign): The string or dict to sign. + + Returns: + List[str | bytes]: A list of signed values. + """ + signed: list[str | bytes] = [] + for k in reversed(self.secret_key): # reversed so that the default key is last + signed.append(URLSafeSerializer(k).dumps(to_sign, salt=self.salt)) + return signed + + def verify(self, to_verify: str | bytes) -> Any: + """Checks the signature of a signed value and returns the original value. + + Args: + to_verify (str | bytes): The signed value to check + + Returns: + Original value if signature is valid, raises BadSignature otherwise + Raises: + BadSignature: If the signature is invalid + """ + return self.serializer.loads(to_verify, salt=self.salt) -class Encryption: - def init_app(self, app): - self.serializer = URLSafeSerializer(app.config.get("SECRET_KEY")) - self.salt = app.config.get("DANGEROUS_SALT") + def verify_unsafe(self, to_verify: str | bytes) -> Any: + """Ignore the signature and return the original value that has been signed. + Since this ignores the signature it should be used with caution. 
- def encrypt(self, thing_to_encrypt): - return self.serializer.dumps(thing_to_encrypt, salt=self.salt) + Args: + to_verify (str | bytes): The signed value to unsign - def decrypt(self, thing_to_decrypt): - return self.serializer.loads(thing_to_decrypt, salt=self.salt) + Returns: + Any: Original value that has been signed + """ + return self.serializer.loads_unsafe(to_verify)[1] def hashpw(password): diff --git a/app/errors.py b/app/errors.py index 5a204de081..0bc19c0b2d 100644 --- a/app/errors.py +++ b/app/errors.py @@ -13,7 +13,6 @@ class VirusScanError(Exception): def __init__(self, message): - super().__init__(message) diff --git a/app/events/rest.py b/app/events/rest.py index 77b50f94eb..2d266be5fd 100644 --- a/app/events/rest.py +++ b/app/events/rest.py @@ -11,6 +11,6 @@ @events.route("", methods=["POST"]) def create_event(): data = request.get_json() - event = event_schema.load(data).data + event = event_schema.load(data) dao_create_event(event) - return jsonify(data=event_schema.dump(event).data), 201 + return jsonify(data=event_schema.dump(event)), 201 diff --git a/app/exceptions.py b/app/exceptions.py index fd3c1cd5d6..922a91ff79 100644 --- a/app/exceptions.py +++ b/app/exceptions.py @@ -11,9 +11,17 @@ class ArchiveValidationError(Exception): pass -class MalwarePendingException(Exception): +class MalwareScanInProgressException(Exception): + pass + + +class MalwareDetectedException(Exception): pass class InvalidUrlException(Exception): pass + + +class DocumentDownloadException(Exception): + pass diff --git a/app/history_meta.py b/app/history_meta.py index 6634df61d8..c8f8b030fc 100644 --- a/app/history_meta.py +++ b/app/history_meta.py @@ -168,7 +168,6 @@ def create_history(obj, history_cls=None): obj_state = attributes.instance_state(obj) data = {} for prop in obj_mapper.iterate_properties: - # expired object attributes and also deferred cols might not # be in the dict. force it them load no matter what by using getattr(). 
if prop.key not in obj_state.dict: diff --git a/app/invite/rest.py b/app/invite/rest.py index 18db40bfb0..93a61e1a1e 100644 --- a/app/invite/rest.py +++ b/app/invite/rest.py @@ -23,7 +23,7 @@ @invite.route("", methods=["POST"]) def create_invited_user(service_id): request_json = request.get_json() - invited_user, errors = invited_user_schema.load(request_json) + invited_user = invited_user_schema.load(request_json) save_invited_user(invited_user) template = dao_get_template_by_id(current_app.config["INVITATION_EMAIL_TEMPLATE_ID"]) @@ -50,24 +50,24 @@ def create_invited_user(service_id): send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) - return jsonify(data=invited_user_schema.dump(invited_user).data), 201 + return jsonify(data=invited_user_schema.dump(invited_user)), 201 @invite.route("", methods=["GET"]) def get_invited_users_by_service(service_id): invited_users = get_invited_users_for_service(service_id) - return jsonify(data=invited_user_schema.dump(invited_users, many=True).data), 200 + return jsonify(data=invited_user_schema.dump(invited_users, many=True)), 200 @invite.route("/", methods=["POST"]) def update_invited_user(service_id, invited_user_id): fetched = get_invited_user(service_id=service_id, invited_user_id=invited_user_id) - current_data = dict(invited_user_schema.dump(fetched).data.items()) + current_data = dict(invited_user_schema.dump(fetched).items()) current_data.update(request.get_json()) - update_dict = invited_user_schema.load(current_data).data + update_dict = invited_user_schema.load(current_data) save_invited_user(update_dict) - return jsonify(data=invited_user_schema.dump(fetched).data), 200 + return jsonify(data=invited_user_schema.dump(fetched)), 200 def invited_user_url(invited_user_id, invite_link_host=None): @@ -76,7 +76,6 @@ def invited_user_url(invited_user_id, invite_link_host=None): token = generate_token( str(invited_user_id), current_app.config["SECRET_KEY"], - current_app.config["DANGEROUS_SALT"], ) if invite_link_host is None: diff --git a/app/job/rest.py b/app/job/rest.py index bd57ceef87..28bfeafd33 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -1,7 +1,11 @@ +from datetime import datetime + import dateutil from flask import Blueprint, current_app, jsonify, request +from notifications_utils.recipients import RecipientCSV +from notifications_utils.template import Template -from app.aws.s3 import get_job_metadata_from_s3 +from app.aws.s3 import get_job_from_s3, get_job_metadata_from_s3 from app.celery.tasks import process_job from app.config import QueueNames from app.dao.fact_notification_status_dao import fetch_notification_statuses_for_job @@ -18,12 +22,21 @@ from app.dao.notifications_dao import get_notifications_for_job from app.dao.services_dao import dao_fetch_service_by_id from app.dao.templates_dao import dao_get_template_by_id +from app.email_limit_utils import decrement_todays_email_count from app.errors import InvalidRequest, register_errors from app.models import ( + EMAIL_TYPE, JOB_STATUS_CANCELLED, JOB_STATUS_PENDING, JOB_STATUS_SCHEDULED, - LETTER_TYPE, + SMS_TYPE, +) +from app.notifications.process_notifications import simulated_recipient +from app.notifications.validators import ( + check_email_daily_limit, + check_sms_daily_limit, + increment_email_daily_count_send_warnings_if_needed, + increment_sms_daily_count_send_warnings_if_needed, ) from app.schemas import ( job_schema, @@ -42,12 +55,14 @@ @job_blueprint.route("/", methods=["GET"]) def get_job_by_service_and_job_id(service_id, job_id): job 
= dao_get_job_by_service_id_and_job_id(service_id, job_id) - statistics = dao_get_notification_outcomes_for_job(service_id, job_id) - data = job_schema.dump(job).data - - data["statistics"] = [{"status": statistic[1], "count": statistic[0]} for statistic in statistics] - - return jsonify(data=data) + if job is not None: + statistics = dao_get_notification_outcomes_for_job(service_id, job_id) + data = job_schema.dump(job) + data["statistics"] = [{"status": statistic[1], "count": statistic[0]} for statistic in statistics] + return jsonify(data=data) + else: + current_app.logger.warning(f"Job not found in database for service_id {service_id} job_id {job_id}") + return jsonify(result="error", message="Job not found in database"), 404 @job_blueprint.route("//cancel", methods=["POST"]) @@ -55,24 +70,27 @@ def cancel_job(service_id, job_id): job = dao_get_future_scheduled_job_by_id_and_service_id(job_id, service_id) job.job_status = JOB_STATUS_CANCELLED dao_update_job(job) - + decrement_todays_email_count(service_id, job.notification_count) return get_job_by_service_and_job_id(service_id, job_id) @job_blueprint.route("//cancel-letter-job", methods=["POST"]) def cancel_letter_job(service_id, job_id): job = dao_get_job_by_service_id_and_job_id(service_id, job_id) - can_we_cancel, errors = can_letter_job_be_cancelled(job) - if can_we_cancel: - data = dao_cancel_letter_job(job) - return jsonify(data), 200 + if job is not None: + can_we_cancel, errors = can_letter_job_be_cancelled(job) + if can_we_cancel: + data = dao_cancel_letter_job(job) + return jsonify(data), 200 + else: + return jsonify(message=errors), 400 else: - return jsonify(message=errors), 400 + return jsonify(result="error", message="Job not found in database"), 404 @job_blueprint.route("//notifications", methods=["GET"]) def get_all_notifications_for_service_job(service_id, job_id): - data = notifications_filter_schema.load(request.args).data + data = notifications_filter_schema.load(request.args) page = data["page"] if "page" in data else 1 page_size = data["page_size"] if "page_size" in data else current_app.config.get("PAGE_SIZE") paginated_notifications = get_notifications_for_job(service_id, job_id, filter_dict=data, page=page, page_size=page_size) @@ -85,7 +103,7 @@ def get_all_notifications_for_service_job(service_id, job_id): if data.get("format_for_csv"): notifications = [notification.serialize_for_csv() for notification in paginated_notifications.items] else: - notifications = notification_with_template_schema.dump(paginated_notifications.items, many=True).data + notifications = notification_with_template_schema.dump(paginated_notifications.items, many=True) return ( jsonify( @@ -122,30 +140,71 @@ def create_job(service_id): raise InvalidRequest("Create job is not allowed: service is inactive ", 403) data = request.get_json() - data.update({"service": service_id}) + try: data.update(**get_job_metadata_from_s3(service_id, data["id"])) except KeyError: raise InvalidRequest({"id": ["Missing data for required field."]}, status_code=400) + if data.get("valid") != "True": + raise InvalidRequest("File is not valid, can't create job", 400) + data["template"] = data.pop("template_id") + template = dao_get_template_by_id(data["template"]) + template_errors = unarchived_template_schema.validate({"archived": template.archived}) - if template.template_type == LETTER_TYPE and service.restricted: - raise InvalidRequest("Create letter job is not allowed for service in trial mode ", 403) + if template_errors: + raise 
InvalidRequest(template_errors, status_code=400) - if data.get("valid") != "True": - raise InvalidRequest("File is not valid, can't create job", 400) + job = get_job_from_s3(service_id, data["id"]) + recipient_csv = RecipientCSV( + job, + template_type=template.template_type, + placeholders=template._as_utils_template().placeholders, + template=Template(template.__dict__), + ) + + if template.template_type == SMS_TYPE: + # set sender_id if missing + default_senders = [x for x in service.service_sms_senders if x.is_default] + default_sender_id = default_senders[0].id if default_senders else None + data["sender_id"] = data.get("sender_id", default_sender_id) + + # calculate the number of simulated recipients + numberOfSimulated = sum(simulated_recipient(i["phone_number"].data, template.template_type) for i in recipient_csv.rows) + mixedRecipients = numberOfSimulated > 0 and numberOfSimulated != len(recipient_csv) + + # if they have specified testing and NON-testing recipients, raise an error + if mixedRecipients: + raise InvalidRequest(message="Bulk sending to testing and non-testing numbers is not supported", status_code=400) + + is_test_notification = len(recipient_csv) == numberOfSimulated + + if not is_test_notification: + check_sms_daily_limit(service, len(recipient_csv)) + increment_sms_daily_count_send_warnings_if_needed(service, len(recipient_csv)) + + elif template.template_type == EMAIL_TYPE: + if "notification_count" in data: + notification_count = int(data["notification_count"]) + else: + current_app.logger.warning( + f"notification_count not in metadata for job {data['id']}, using len(recipient_csv) instead." + ) + notification_count = len(recipient_csv) + + check_email_daily_limit(service, notification_count) - errors = unarchived_template_schema.validate({"archived": template.archived}) + scheduled_for = datetime.fromisoformat(data.get("scheduled_for")) if data.get("scheduled_for") else None - if errors: - raise InvalidRequest(errors, status_code=400) + if scheduled_for is None or not scheduled_for.date() > datetime.today().date(): + increment_email_daily_count_send_warnings_if_needed(service, notification_count) data.update({"template_version": template.version}) - job = job_schema.load(data).data + job = job_schema.load(data) if job.scheduled_for: job.job_status = JOB_STATUS_SCHEDULED @@ -155,7 +214,7 @@ def create_job(service_id): if job.job_status == JOB_STATUS_PENDING: process_job.apply_async([str(job.id)], queue=QueueNames.JOBS) - job_json = job_schema.dump(job).data + job_json = job_schema.dump(job) job_json["statistics"] = [] return jsonify(data=job_json), 201 @@ -169,7 +228,7 @@ def get_paginated_jobs(service_id, limit_days, statuses, page): page_size=current_app.config["PAGE_SIZE"], statuses=statuses, ) - data = job_schema.dump(pagination.items, many=True).data + data = job_schema.dump(pagination.items, many=True) for job_data in data: start = job_data["processing_started"] start = dateutil.parser.parse(start).replace(tzinfo=None) if start else None diff --git a/app/json_provider.py b/app/json_provider.py new file mode 100644 index 0000000000..6c9bc52c09 --- /dev/null +++ b/app/json_provider.py @@ -0,0 +1,40 @@ +import json + +from flask.json.provider import JSONProvider, _default +from sqlalchemy.engine.row import Row + + +def default_encoder(o): + # Support for sqlalchemy.engine.row.Row + if isinstance(o, Row): + row: Row = o # type: ignore + m: dict = row._asdict() # type: ignore + return m + # Redirect to default + return _default(o) + + +class 
NotifyJSONProvider(JSONProvider): + """A JSON provider that adds edge case support for the Notify Python stack. + + Namely, these are currently supported: + + 1. Added support for the sqlalchemy.engine.row.Row data type. When we + upgraded to version 4, a few JSON serialization started to fail as + the library now returns a Row object on the session.query returns. + This encoder adds support to convert it to a dict, which the json + package supports by default. + + see https://github.com/pallets/flask/pull/4692 for details on JSONProvider + """ + + def dumps(self, obj, *, option=None, **kwargs): + return json.dumps(obj, default=default_encoder, **kwargs) + + def loads(self, s, **kwargs): + return json.loads(s, **kwargs) + + def response(self, *args, **kwargs): + obj = self._prepare_response_obj(args, kwargs) + dumped = self.dumps(obj) + return self._app.response_class(dumped, mimetype="application/json") diff --git a/app/letters/rest.py b/app/letters/rest.py index 506b495aad..87684e0b66 100644 --- a/app/letters/rest.py +++ b/app/letters/rest.py @@ -1,9 +1,5 @@ -from flask import Blueprint, jsonify, request +from flask import Blueprint -from app.celery.tasks import process_returned_letters_list -from app.config import QueueNames -from app.letters.letter_schemas import letter_references -from app.schema_validation import validate from app.v2.errors import register_errors letter_job = Blueprint("letter-job", __name__) @@ -12,8 +8,4 @@ @letter_job.route("/letters/returned", methods=["POST"]) def create_process_returned_letters_job(): - references = validate(request.get_json(), letter_references) - - process_returned_letters_list.apply_async([references["references"]], queue=QueueNames.DATABASE) - - return jsonify(references=references["references"]), 200 + pass diff --git a/app/letters/utils.py b/app/letters/utils.py index 6369b22040..8d5bcab489 100644 --- a/app/letters/utils.py +++ b/app/letters/utils.py @@ -1,21 +1,6 @@ -import io -import math -from datetime import datetime, timedelta from enum import Enum -import boto3 -from flask import current_app -from notifications_utils.letter_timings import LETTER_PROCESSING_DEADLINE -from notifications_utils.pdf import pdf_page_count -from notifications_utils.s3 import s3upload -from notifications_utils.timezones import convert_utc_to_local_timezone - -from app.models import ( - KEY_TYPE_TEST, - NOTIFICATION_VALIDATION_FAILED, - RESOLVE_POSTAGE_FOR_FILE_NAME, - SECOND_CLASS, -) +from app.models import SECOND_CLASS class ScanErrorType(Enum): @@ -29,203 +14,64 @@ class ScanErrorType(Enum): def get_folder_name(_now, is_test_or_scan_letter=False): - if is_test_or_scan_letter: - folder_name = "" - else: - print_datetime = convert_utc_to_local_timezone(_now) - if print_datetime.time() > LETTER_PROCESSING_DEADLINE: - print_datetime += timedelta(days=1) - folder_name = "{}/".format(print_datetime.date()) - return folder_name + pass def get_letter_pdf_filename(reference, crown, is_scan_letter=False, postage=SECOND_CLASS): - now = datetime.utcnow() - - upload_file_name = LETTERS_PDF_FILE_LOCATION_STRUCTURE.format( - folder=get_folder_name(now, is_scan_letter), - reference=reference, - duplex="D", - letter_class=RESOLVE_POSTAGE_FOR_FILE_NAME[postage], - colour="C", - crown="C" if crown else "N", - date=now.strftime("%Y%m%d%H%M%S"), - ).upper() - - return upload_file_name + pass def get_bucket_name_and_prefix_for_notification(notification): - folder = "" - if notification.status == NOTIFICATION_VALIDATION_FAILED: - bucket_name = 
current_app.config["INVALID_PDF_BUCKET_NAME"] - elif notification.key_type == KEY_TYPE_TEST: - bucket_name = current_app.config["TEST_LETTERS_BUCKET_NAME"] - else: - bucket_name = current_app.config["LETTERS_PDF_BUCKET_NAME"] - if notification.sent_at: - folder = "{}/".format(notification.sent_at.date()) - elif notification.updated_at: - folder = get_folder_name(notification.updated_at, False) - else: - folder = get_folder_name(notification.created_at, False) - - upload_file_name = PRECOMPILED_BUCKET_PREFIX.format(folder=folder, reference=notification.reference).upper() - - return bucket_name, upload_file_name + pass def get_reference_from_filename(filename): - # filename looks like '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF' - filename_parts = filename.split(".") - return filename_parts[1] + pass def upload_letter_pdf(notification, pdf_data, precompiled=False): - current_app.logger.info( - "PDF Letter {} reference {} created at {}, {} bytes".format( - notification.id, - notification.reference, - notification.created_at, - len(pdf_data), - ) - ) - - upload_file_name = get_letter_pdf_filename( - notification.reference, - notification.service.crown, - is_scan_letter=precompiled or notification.key_type == KEY_TYPE_TEST, - postage=notification.postage, - ) - - if precompiled: - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - elif notification.key_type == KEY_TYPE_TEST: - bucket_name = current_app.config["TEST_LETTERS_BUCKET_NAME"] - else: - bucket_name = current_app.config["LETTERS_PDF_BUCKET_NAME"] - - s3upload( - filedata=pdf_data, - region=current_app.config["AWS_REGION"], - bucket_name=bucket_name, - file_location=upload_file_name, - ) - - current_app.logger.info( - "Uploaded letters PDF {} to {} for notification id {}".format(upload_file_name, bucket_name, notification.id) - ) - return upload_file_name + pass def move_failed_pdf(source_filename, scan_error_type): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - target_filename = ("ERROR/" if scan_error_type == ScanErrorType.ERROR else "FAILURE/") + source_filename - - _move_s3_object(scan_bucket, source_filename, scan_bucket, target_filename) + pass def copy_redaction_failed_pdf(source_filename): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - target_filename = "REDACTION_FAILURE/" + source_filename - - _copy_s3_object(scan_bucket, source_filename, scan_bucket, target_filename) + pass def move_error_pdf_to_scan_bucket(source_filename): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - error_file = "ERROR/" + source_filename - - _move_s3_object(scan_bucket, error_file, scan_bucket, source_filename) + pass def move_scan_to_invalid_pdf_bucket(source_filename): - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - invalid_pdf_bucket = current_app.config["INVALID_PDF_BUCKET_NAME"] - _move_s3_object(scan_bucket, source_filename, invalid_pdf_bucket, source_filename) + pass def move_uploaded_pdf_to_letters_bucket(source_filename, upload_filename): - _move_s3_object( - source_bucket=current_app.config["TRANSIENT_UPLOADED_LETTERS"], - source_filename=source_filename, - target_bucket=current_app.config["LETTERS_PDF_BUCKET_NAME"], - target_filename=upload_filename, - ) + pass def get_file_names_from_error_bucket(): - s3 = boto3.resource("s3") - scan_bucket = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - bucket = s3.Bucket(scan_bucket) - - return bucket.objects.filter(Prefix="ERROR") + pass def get_letter_pdf(notification): - bucket_name, prefix = 
get_bucket_name_and_prefix_for_notification(notification) - - s3 = boto3.resource("s3") - bucket = s3.Bucket(bucket_name) - item = next(x for x in bucket.objects.filter(Prefix=prefix)) - - obj = s3.Object(bucket_name=bucket_name, key=item.key) - return obj.get()["Body"].read() + pass def _move_s3_object(source_bucket, source_filename, target_bucket, target_filename): - s3 = boto3.resource("s3") - copy_source = {"Bucket": source_bucket, "Key": source_filename} - - target_bucket = s3.Bucket(target_bucket) - obj = target_bucket.Object(target_filename) - - # Tags are copied across but the expiration time is reset in the destination bucket - # e.g. if a file has 5 days left to expire on a ONE_WEEK retention in the source bucket, - # in the destination bucket the expiration time will be reset to 7 days left to expire - obj.copy(copy_source, ExtraArgs={"ServerSideEncryption": "AES256"}) - - s3.Object(source_bucket, source_filename).delete() - - current_app.logger.info( - "Moved letter PDF: {}/{} to {}/{}".format(source_bucket, source_filename, target_bucket, target_filename) - ) + pass def _copy_s3_object(source_bucket, source_filename, target_bucket, target_filename): - s3 = boto3.resource("s3") - copy_source = {"Bucket": source_bucket, "Key": source_filename} - - target_bucket = s3.Bucket(target_bucket) - obj = target_bucket.Object(target_filename) - - # Tags are copied across but the expiration time is reset in the destination bucket - # e.g. if a file has 5 days left to expire on a ONE_WEEK retention in the source bucket, - # in the destination bucket the expiration time will be reset to 7 days left to expire - obj.copy(copy_source, ExtraArgs={"ServerSideEncryption": "AES256"}) - - current_app.logger.info( - "Copied letter PDF: {}/{} to {}/{}".format(source_bucket, source_filename, target_bucket, target_filename) - ) + pass def letter_print_day(created_at): - bst_print_datetime = convert_utc_to_local_timezone(created_at) + timedelta(hours=6, minutes=30) - bst_print_date = bst_print_datetime.date() - - current_bst_date = convert_utc_to_local_timezone(datetime.utcnow()).date() - - if bst_print_date >= current_bst_date: - return "today" - else: - print_date = bst_print_datetime.strftime("%d %B").lstrip("0") - return "on {}".format(print_date) + pass def get_page_count(pdf): - pages = pdf_page_count(io.BytesIO(pdf)) - pages_per_sheet = 2 - billable_units = math.ceil(pages / pages_per_sheet) - return billable_units + pass diff --git a/app/models.py b/app/models.py index f6a6458923..74000e2b09 100644 --- a/app/models.py +++ b/app/models.py @@ -1,7 +1,8 @@ import datetime import itertools import uuid -from typing import Any, Iterable, Optional +from enum import Enum +from typing import Any, Iterable, Literal from flask import current_app, url_for from flask_sqlalchemy.model import DefaultMeta @@ -29,11 +30,20 @@ from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.ext.hybrid import hybrid_property -from app import DATETIME_FORMAT, db, encryption -from app.config import QueueNames +from app import ( + DATETIME_FORMAT, + db, + signer_api_key, + signer_bearer_token, + signer_inbound_sms, + signer_personalisation, +) +from app.clients.sms import SmsSendingVehicles from app.encryption import check_hash, hashpw from app.history_meta import Versioned +TemplateType = Literal["sms", "email", "letter"] + SMS_TYPE = "sms" EMAIL_TYPE = "email" LETTER_TYPE = "letter" @@ -56,6 +66,8 @@ COMPLAINT_CALLBACK_TYPE = "complaint" SERVICE_CALLBACK_TYPES = [DELIVERY_STATUS_CALLBACK_TYPE, 
COMPLAINT_CALLBACK_TYPE] +sms_sending_vehicles = db.Enum(*[vehicle.value for vehicle in SmsSendingVehicles], name="sms_sending_vehicles") + def filter_null_value_fields(obj): return dict(filter(lambda x: x[1] is not None, obj.items())) @@ -124,6 +136,7 @@ class User(BaseModel): ) blocked = db.Column(db.Boolean, nullable=False, default=False) additional_information = db.Column(JSONB(none_as_null=True), nullable=True, default={}) + password_expired = db.Column(db.Boolean, nullable=False, default=False) # either email auth or a mobile number must be provided CheckConstraint("auth_type = 'email_auth' or mobile_number is not null") @@ -159,7 +172,7 @@ def get_permissions(self, service_id=None): retval[service_id].append(x.permission) return retval - def serialize(self): + def serialize(self) -> dict: return { "id": self.id, "name": self.name, @@ -179,9 +192,10 @@ def serialize(self): "state": self.state, "blocked": self.blocked, "additional_information": self.additional_information, + "password_expired": self.password_expired, } - def serialize_for_users_list(self): + def serialize_for_users_list(self) -> dict: return { "id": self.id, "name": self.name, @@ -197,8 +211,6 @@ class ServiceUser(BaseModel): __table_args__ = (UniqueConstraint("user_id", "service_id", name="uix_user_to_service"),) - user = db.relationship("User") - user_to_organisation = db.Table( "user_to_organisation", @@ -267,8 +279,14 @@ class EmailBranding(BaseModel): nullable=False, default=BRANDING_ORG_NEW, ) + organisation_id = db.Column( + UUID(as_uuid=True), db.ForeignKey("organisation.id", ondelete="SET NULL"), index=True, nullable=True + ) + organisation = db.relationship("Organisation", back_populates="email_branding", foreign_keys=[organisation_id]) + alt_text_en = db.Column(db.String(), nullable=True) + alt_text_fr = db.Column(db.String(), nullable=True) - def serialize(self): + def serialize(self) -> dict: serialized = { "id": str(self.id), "colour": self.colour, @@ -276,6 +294,9 @@ def serialize(self): "name": self.name, "text": self.text, "brand_type": self.brand_type, + "organisation_id": str(self.organisation_id) if self.organisation_id else "", + "alt_text_en": self.alt_text_en, + "alt_text_fr": self.alt_text_fr, } return serialized @@ -307,7 +328,7 @@ class LetterBranding(BaseModel): name = db.Column(db.String(255), unique=True, nullable=False) filename = db.Column(db.String(255), unique=True, nullable=False) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "name": self.name, @@ -379,6 +400,7 @@ class Domain(BaseModel): ORGANISATION_TYPES = [ "central", + "province_or_territory", "local", "nhs_central", "nhs_local", @@ -439,10 +461,9 @@ class Organisation(BaseModel): "Domain", ) - email_branding = db.relationship("EmailBranding") + email_branding = db.relationship("EmailBranding", uselist=False) email_branding_id = db.Column( UUID(as_uuid=True), - db.ForeignKey("email_branding.id"), nullable=True, ) @@ -461,7 +482,7 @@ def live_services(self): def domain_list(self): return [domain.domain for domain in self.domains] - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "name": self.name, @@ -482,7 +503,7 @@ def serialize(self): "count_of_live_services": len(self.live_services), } - def serialize_for_list(self): + def serialize_for_list(self) -> dict: return { "name": self.name, "id": str(self.id), @@ -514,6 +535,7 @@ class Service(BaseModel, Versioned): ) active = db.Column(db.Boolean, index=False, unique=False, nullable=False, default=True) 
message_limit = db.Column(db.BigInteger, index=False, unique=False, nullable=False) + sms_daily_limit = db.Column(db.BigInteger, index=False, unique=False, nullable=False) restricted = db.Column(db.Boolean, index=False, unique=False, nullable=False) research_mode = db.Column(db.Boolean, index=False, unique=False, nullable=False, default=False) email_from = db.Column(db.Text, index=False, unique=True, nullable=False) @@ -539,6 +561,7 @@ class Service(BaseModel, Versioned): go_live_user = db.relationship("User", foreign_keys=[go_live_user_id]) go_live_at = db.Column(db.DateTime, nullable=True) sending_domain = db.Column(db.String(255), nullable=True, unique=False) + organisation_notes = db.Column(db.String(255), nullable=True, unique=False) organisation_id = db.Column(UUID(as_uuid=True), db.ForeignKey("organisation.id"), index=True, nullable=True) organisation = db.relationship("Organisation", backref="services") @@ -584,6 +607,7 @@ def from_json(cls, data): fields.pop("letter_logo_filename", None) fields.pop("letter_contact_block", None) fields.pop("email_branding", None) + fields["sms_daily_limit"] = fields.get("sms_daily_limit", 100) return cls(**fields) @@ -593,7 +617,7 @@ def get_inbound_number(self): def get_default_sms_sender(self): default_sms_sender = [x for x in self.service_sms_senders if x.is_default] - return default_sms_sender[0].sms_sender + return default_sms_sender[0].sms_sender if default_sms_sender else None def get_default_reply_to_email_address(self): default_reply_to = [x for x in self.reply_to_email_addresses if x.is_default] @@ -606,7 +630,7 @@ def get_default_letter_contact(self): def has_permission(self, permission): return permission in [p.permission for p in self.permissions] - def serialize_for_org_dashboard(self): + def serialize_for_org_dashboard(self) -> dict: return { "id": str(self.id), "name": self.name, @@ -633,14 +657,14 @@ class AnnualBilling(BaseModel): UniqueConstraint("financial_year_start", "service_id", name="ix_annual_billing_service_id") service = db.relationship(Service, backref=db.backref("annual_billing", uselist=True)) - def serialize_free_sms_items(self): + def serialize_free_sms_items(self) -> dict: return { "free_sms_fragment_limit": self.free_sms_fragment_limit, "financial_year_start": self.financial_year_start, } - def serialize(self): - def serialize_service(): + def serialize(self) -> dict: + def serialize_service() -> dict: return {"id": str(self.service_id), "name": self.service.name} return { @@ -672,8 +696,8 @@ class InboundNumber(BaseModel): created_at = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False) updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) - def serialize(self): - def serialize_service(): + def serialize(self) -> dict: + def serialize_service() -> dict: return {"id": str(self.service_id), "name": self.service.name} return { @@ -716,7 +740,7 @@ class ServiceSmsSender(BaseModel): def get_reply_to_text(self): return try_validate_and_format_phone_number(self.sms_sender) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "sms_sender": self.sms_sender, @@ -746,7 +770,6 @@ class ServicePermission(BaseModel): primary_key=True, nullable=False, ) - service = db.relationship("Service") created_at = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False) service_permission_types = db.relationship(Service, backref=db.backref("permissions", cascade="all, delete-orphan")) @@ -817,15 +840,15 @@ class ServiceInboundApi(BaseModel, 
Versioned): @property def bearer_token(self): if self._bearer_token: - return encryption.decrypt(self._bearer_token) + return signer_bearer_token.verify(self._bearer_token) return None @bearer_token.setter def bearer_token(self, bearer_token): if bearer_token: - self._bearer_token = encryption.encrypt(str(bearer_token)) + self._bearer_token = signer_bearer_token.sign(str(bearer_token)) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "service_id": str(self.service_id), @@ -854,15 +877,15 @@ class ServiceCallbackApi(BaseModel, Versioned): @property def bearer_token(self): if self._bearer_token: - return encryption.decrypt(self._bearer_token) + return signer_bearer_token.verify(self._bearer_token) return None @bearer_token.setter def bearer_token(self, bearer_token): if bearer_token: - self._bearer_token = encryption.encrypt(str(bearer_token)) + self._bearer_token = signer_bearer_token.sign(str(bearer_token)) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "service_id": str(self.service_id), @@ -905,6 +928,8 @@ class ApiKey(BaseModel, Versioned): ) created_by = db.relationship("User") created_by_id = db.Column(UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=False) + compromised_key_info = db.Column(JSONB(none_as_null=True), nullable=True, default={}) + last_used_timestamp = db.Column(db.DateTime, index=False, unique=False, nullable=True, default=None) __table_args__ = ( Index( @@ -919,18 +944,19 @@ class ApiKey(BaseModel, Versioned): @property def secret(self): if self._secret: - return encryption.decrypt(self._secret) + return signer_api_key.verify(self._secret) return None @secret.setter def secret(self, secret): if secret: - self._secret = encryption.encrypt(str(secret)) + self._secret = signer_api_key.sign(str(secret)) -KEY_TYPE_NORMAL = "normal" -KEY_TYPE_TEAM = "team" -KEY_TYPE_TEST = "test" +ApiKeyType = Literal["normal", "team", "test"] +KEY_TYPE_NORMAL: Literal["normal"] = "normal" +KEY_TYPE_TEAM: Literal["team"] = "team" +KEY_TYPE_TEST: Literal["test"] = "test" class KeyTypes(BaseModel): @@ -964,7 +990,7 @@ class TemplateFolder(BaseModel): __table_args__: Iterable[Any] = (UniqueConstraint("id", "service_id", name="ix_id_service_id"), {}) - def serialize(self): + def serialize(self) -> dict: return { "id": self.id, "name": self.name, @@ -1010,6 +1036,42 @@ def get_users_with_permission(self): PRECOMPILED_TEMPLATE_NAME = "Pre-compiled PDF" +class TemplateCategory(BaseModel): + __tablename__ = "template_categories" + + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + name_en = db.Column(db.String(255), unique=True, nullable=False) + name_fr = db.Column(db.String(255), unique=True, nullable=False) + description_en = db.Column(db.String(200), nullable=True) + description_fr = db.Column(db.String(200), nullable=True) + sms_process_type = db.Column(db.String(200), nullable=False) + email_process_type = db.Column(db.String(200), nullable=False) + hidden = db.Column(db.Boolean, nullable=False, default=False) + created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) + updated_at = db.Column(db.DateTime, onupdate=datetime.datetime.utcnow) + sms_sending_vehicle = db.Column(sms_sending_vehicles, nullable=False, default="long_code") + + def serialize(self): + return { + "id": self.id, + "name_en": self.name_en, + "name_fr": self.name_fr, + "description_en": self.description_en, + "description_fr": self.description_fr, + "sms_process_type": 
self.sms_process_type, + "email_process_type": self.email_process_type, + "hidden": self.hidden, + "created_at": self.created_at, + "updated_at": self.updated_at, + "sms_sending_vehicle": self.sms_sending_vehicle, + } + + @classmethod + def from_json(cls, data): + fields = data.copy() + return cls(**fields) + + class TemplateBase(BaseModel): __abstract__ = True @@ -1021,7 +1083,7 @@ def __init__(self, **kwargs): id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) name = db.Column(db.String(255), nullable=False) - template_type = db.Column(template_types, nullable=False) + template_type: TemplateType = db.Column(template_types, nullable=False) created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) updated_at = db.Column(db.DateTime, onupdate=datetime.datetime.utcnow) content = db.Column(db.Text, nullable=False) @@ -1055,6 +1117,14 @@ def service_id(cls): def created_by_id(cls): return db.Column(UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=False) + @declared_attr + def template_category_id(cls): + return db.Column(UUID(as_uuid=True), db.ForeignKey("template_categories.id"), index=True, nullable=True) + + @declared_attr + def template_category(cls): + return db.relationship("TemplateCategory", primaryjoin="Template.template_category_id == TemplateCategory.id") + @declared_attr def created_by(cls): return db.relationship("User") @@ -1065,17 +1135,10 @@ def process_type(cls): db.String(255), db.ForeignKey("template_process_type.name"), index=True, - nullable=False, + nullable=True, default=NORMAL, ) - def queue_to_use(self): - return { - NORMAL: None, - PRIORITY: QueueNames.PRIORITY, - BULK: QueueNames.BULK, - }[self.process_type] - redact_personalisation = association_proxy("template_redacted", "redact_personalisation") @declared_attr @@ -1135,7 +1198,7 @@ def _as_utils_template(self): contact_block=self.service.get_default_letter_contact(), ) - def serialize(self): + def serialize(self) -> dict: serialized = { "id": str(self.id), "type": self.template_type, @@ -1182,6 +1245,17 @@ def get_link(self): _external=True, ) + @property + def template_process_type(self): + """By default we use the process_type from TemplateCategory, but allow admins to override it on a per-template basis. + Only when overriden do we use the process_type from the template itself. 
+ """ + if self.template_type == SMS_TYPE: + return self.process_type if self.process_type else self.template_categories.sms_process_type + elif self.template_type == EMAIL_TYPE: + return self.process_type if self.process_type else self.template_categories.email_process_type + return self.process_type + @classmethod def from_json(cls, data, folder=None): """ @@ -1242,6 +1316,10 @@ def from_json(cls, data): fields.pop("folder", None) return super(TemplateHistory, cls).from_json(fields) + @declared_attr + def template_category(cls): + return db.relationship("TemplateCategory", primaryjoin="TemplateHistory.template_category_id == TemplateCategory.id") + @declared_attr def template_redacted(cls): return db.relationship( @@ -1260,12 +1338,14 @@ def get_link(self): SNS_PROVIDER = "sns" +PINPOINT_PROVIDER = "pinpoint" SES_PROVIDER = "ses" -SMS_PROVIDERS = [SNS_PROVIDER] +SMS_PROVIDERS = [SNS_PROVIDER, PINPOINT_PROVIDER] EMAIL_PROVIDERS = [SES_PROVIDER] PROVIDERS = SMS_PROVIDERS + EMAIL_PROVIDERS +NotificationType = Literal["email", "sms", "letter"] NOTIFICATION_TYPE = [EMAIL_TYPE, SMS_TYPE, LETTER_TYPE] notification_types = db.Enum(*NOTIFICATION_TYPE, name="notification_type") @@ -1348,9 +1428,9 @@ class JobStatus(BaseModel): class Job(BaseModel): __tablename__ = "jobs" - id: UUID = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - original_file_name: str = db.Column(db.String, nullable=False) - service_id: UUID = db.Column( + id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + original_file_name = db.Column(db.String, nullable=False) + service_id = db.Column( UUID(as_uuid=True), db.ForeignKey("services.id"), index=True, @@ -1358,44 +1438,44 @@ class Job(BaseModel): nullable=False, ) service = db.relationship("Service", backref=db.backref("jobs", lazy="dynamic")) - template_id: UUID = db.Column(UUID(as_uuid=True), db.ForeignKey("templates.id"), index=True, unique=False) + template_id = db.Column(UUID(as_uuid=True), db.ForeignKey("templates.id"), index=True, unique=False) template = db.relationship("Template", backref=db.backref("jobs", lazy="dynamic")) - template_version: int = db.Column(db.Integer, nullable=False) - created_at: datetime.datetime = db.Column( + template_version = db.Column(db.Integer, nullable=False) + created_at = db.Column( db.DateTime, index=False, unique=False, nullable=False, default=datetime.datetime.utcnow, ) - updated_at: Optional[datetime.datetime] = db.Column( + updated_at = db.Column( db.DateTime, index=False, unique=False, nullable=True, onupdate=datetime.datetime.utcnow, ) - notification_count: int = db.Column(db.Integer, nullable=False) - notifications_sent: int = db.Column(db.Integer, nullable=False, default=0) - notifications_delivered: int = db.Column(db.Integer, nullable=False, default=0) - notifications_failed: int = db.Column(db.Integer, nullable=False, default=0) + notification_count = db.Column(db.Integer, nullable=False) + notifications_sent = db.Column(db.Integer, nullable=False, default=0) + notifications_delivered = db.Column(db.Integer, nullable=False, default=0) + notifications_failed = db.Column(db.Integer, nullable=False, default=0) - processing_started: Optional[datetime.datetime] = db.Column(db.DateTime, index=False, unique=False, nullable=True) - processing_finished: Optional[datetime.datetime] = db.Column(db.DateTime, index=False, unique=False, nullable=True) + processing_started = db.Column(db.DateTime, index=False, unique=False, nullable=True) + processing_finished = db.Column(db.DateTime, index=False, 
unique=False, nullable=True) created_by = db.relationship("User") - created_by_id: Optional[UUID] = db.Column(UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=True) - api_key_id: Optional[UUID] = db.Column(UUID(as_uuid=True), db.ForeignKey("api_keys.id"), index=True, nullable=True) + created_by_id = db.Column(UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=True) + api_key_id = db.Column(UUID(as_uuid=True), db.ForeignKey("api_keys.id"), index=True, nullable=True) api_key = db.relationship("ApiKey") - scheduled_for: Optional[datetime.datetime] = db.Column(db.DateTime, index=True, unique=False, nullable=True) - job_status: str = db.Column( + scheduled_for = db.Column(db.DateTime, index=True, unique=False, nullable=True) + job_status = db.Column( db.String(255), db.ForeignKey("job_status.name"), index=True, nullable=False, default="pending", ) - archived: bool = db.Column(db.Boolean, nullable=False, default=False) - sender_id: Optional[UUID] = db.Column(UUID(as_uuid=True), index=False, unique=False, nullable=True) + archived = db.Column(db.Boolean, nullable=False, default=False) + sender_id = db.Column(UUID(as_uuid=True), index=False, unique=False, nullable=True) VERIFY_CODE_TYPES = [EMAIL_TYPE, SMS_TYPE] @@ -1524,6 +1604,42 @@ def check_code(self, cde): POSTAGE_TYPES = [FIRST_CLASS, SECOND_CLASS] RESOLVE_POSTAGE_FOR_FILE_NAME = {FIRST_CLASS: 1, SECOND_CLASS: 2} +# Bounce types +NOTIFICATION_HARD_BOUNCE = "hard-bounce" +NOTIFICATION_SOFT_BOUNCE = "soft-bounce" +NOTIFICATION_UNKNOWN_BOUNCE = "unknown-bounce" +# List +NOTIFICATION_FEEDBACK_TYPES = [NOTIFICATION_HARD_BOUNCE, NOTIFICATION_SOFT_BOUNCE, NOTIFICATION_UNKNOWN_BOUNCE] + +# Hard bounce sub-types +NOTIFICATION_HARD_GENERAL = "general" +NOTIFICATION_HARD_NOEMAIL = "no-email" +NOTIFICATION_HARD_SUPPRESSED = "suppressed" +NOTIFICATION_HARD_ONACCOUNTSUPPRESSIONLIST = "on-account-suppression-list" +# List +NOTIFICATION_HARD_BOUNCE_TYPES = [ + NOTIFICATION_HARD_GENERAL, + NOTIFICATION_HARD_NOEMAIL, + NOTIFICATION_HARD_SUPPRESSED, + NOTIFICATION_HARD_ONACCOUNTSUPPRESSIONLIST, +] + +# Soft bounce sub-types +NOTIFICATION_SOFT_GENERAL = "general" +NOTIFICATION_SOFT_MAILBOXFULL = "mailbox-full" +NOTIFICATION_SOFT_MESSAGETOOLARGE = "message-too-large" +NOTIFICATION_SOFT_CONTENTREJECTED = "content-rejected" +NOTIFICATION_SOFT_ATTACHMENTREJECTED = "attachment-rejected" +# List +NOTIFICATION_SOFT_BOUNCE_TYPES = [ + NOTIFICATION_SOFT_GENERAL, + NOTIFICATION_SOFT_MAILBOXFULL, + NOTIFICATION_SOFT_MESSAGETOOLARGE, + NOTIFICATION_SOFT_CONTENTREJECTED, + NOTIFICATION_SOFT_ATTACHMENTREJECTED, +] +NOTIFICATION_UNKNOWN_BOUNCE_SUBTYPE = "unknown-bounce-subtype" + class NotificationStatusTypes(BaseModel): __tablename__ = "notification_status_types" @@ -1535,13 +1651,13 @@ class Notification(BaseModel): __tablename__ = "notifications" id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - to = db.Column(db.String, nullable=False) - normalised_to = db.Column(db.String, nullable=True) + to = db.Column(db.SensitiveString, nullable=False) + normalised_to = db.Column(db.SensitiveString, nullable=True) job_id = db.Column(UUID(as_uuid=True), db.ForeignKey("jobs.id"), index=True, unique=False) job = db.relationship("Job", backref=db.backref("notifications", lazy="dynamic")) job_row_number = db.Column(db.Integer, nullable=True) service_id = db.Column(UUID(as_uuid=True), db.ForeignKey("services.id"), index=True, unique=False) - service: Service = db.relationship("Service") + service = db.relationship("Service") 
template_id = db.Column(UUID(as_uuid=True), index=True, unique=False) template_version = db.Column(db.Integer, nullable=False) template = db.relationship("TemplateHistory") @@ -1577,9 +1693,9 @@ class Notification(BaseModel): ) reference = db.Column(db.String, nullable=True, index=True) client_reference = db.Column(db.String, index=True, nullable=True) - _personalisation = db.Column(db.String, nullable=True) + _personalisation = db.Column(db.SensitiveString, nullable=True) - scheduled_notification = db.relationship("ScheduledNotification", uselist=False) + scheduled_notification = db.relationship("ScheduledNotification", uselist=False, back_populates="notification") client_reference = db.Column(db.String, index=True, nullable=True) @@ -1594,6 +1710,13 @@ class Notification(BaseModel): postage = db.Column(db.String, nullable=True) provider_response = db.Column(db.Text, nullable=True) + queue_name = db.Column(db.Text, nullable=True) + + # feedback columns + feedback_type = db.Column(db.String, nullable=True) + feedback_subtype = db.Column(db.String, nullable=True) + ses_feedback_id = db.Column(db.String, nullable=True) + ses_feedback_date = db.Column(db.DateTime, nullable=True) CheckConstraint( """ @@ -1616,12 +1739,12 @@ class Notification(BaseModel): @property def personalisation(self): if self._personalisation: - return encryption.decrypt(self._personalisation) + return signer_personalisation.verify(self._personalisation) return {} @personalisation.setter def personalisation(self, personalisation): - self._personalisation = encryption.encrypt(personalisation or {}) + self._personalisation = signer_personalisation.sign(personalisation or {}) def completed_at(self): if self.status in NOTIFICATION_STATUS_TYPES_COMPLETED: @@ -1701,25 +1824,40 @@ def subject(self): @property def formatted_status(self): + def _getStatusByBounceSubtype(): + """Return the status of a notification based on the bounce sub type""" + if self.feedback_subtype: + return { + "suppressed": "Blocked", + "on-account-suppression-list": "Blocked", + }.get(self.feedback_subtype, "No such address") + else: + return "No such address" + return { "email": { "failed": "Failed", - "technical-failure": "Technical failure", - "temporary-failure": "Inbox not accepting messages right now", - "permanent-failure": "Email address doesn’t exist", + "technical-failure": "Tech issue", + "temporary-failure": "Content or inbox issue", + "permanent-failure": _getStatusByBounceSubtype(), + "virus-scan-failed": "Attachment has virus", "delivered": "Delivered", - "sending": "Sending", - "created": "Sending", + "sending": "In transit", + "created": "In transit", "sent": "Delivered", + "pending": "In transit", + "pending-virus-check": "In transit", + "pii-check-failed": "Exceeds Protected A", }, "sms": { "failed": "Failed", - "technical-failure": "Technical failure", - "temporary-failure": "Phone not accepting messages right now", - "permanent-failure": "Phone number doesn’t exist", + "technical-failure": "Tech issue", + "temporary-failure": "Carrier issue", + "permanent-failure": "No such number", "delivered": "Delivered", - "sending": "Sending", - "created": "Sending", + "sending": "In transit", + "created": "In transit", + "pending": "In transit", "sent": "Sent", }, "letter": { @@ -1762,7 +1900,7 @@ def get_created_by_email_address(self): else: return None - def serialize_for_csv(self): + def serialize_for_csv(self) -> dict: created_at_in_bst = convert_utc_to_local_timezone(self.created_at) serialized = { "row_number": "" if self.job_row_number 
is None else self.job_row_number + 1, @@ -1778,7 +1916,7 @@ def serialize_for_csv(self): return serialized - def serialize(self): + def serialize(self) -> dict: template_dict = { "version": self.template.version, "id": self.template.id, @@ -1799,6 +1937,7 @@ def serialize(self): "postcode": None, "type": self.notification_type, "status": self.get_letter_status() if self.notification_type == LETTER_TYPE else self.status, + "status_description": self.formatted_status, "provider_response": self.provider_response, "template": template_dict, "body": self.content, @@ -1882,6 +2021,14 @@ class NotificationHistory(BaseModel, HistoryModel): created_by_id = db.Column(UUID(as_uuid=True), nullable=True) postage = db.Column(db.String, nullable=True) + queue_name = db.Column(db.Text, nullable=True) + + # feedback columns + feedback_type = db.Column(db.String, nullable=True) + feedback_subtype = db.Column(db.String, nullable=True) + ses_feedback_id = db.Column(db.String, nullable=True) + ses_feedback_date = db.Column(db.DateTime, nullable=True) + CheckConstraint( """ CASE WHEN notification_type = 'letter' THEN @@ -1921,7 +2068,7 @@ class ScheduledNotification(BaseModel): index=True, nullable=False, ) - notification = db.relationship("Notification", uselist=False) + notification = db.relationship("Notification", uselist=False, back_populates="scheduled_notification") scheduled_for = db.Column(db.DateTime, index=False, nullable=False) pending = db.Column(db.Boolean, nullable=False, default=True) @@ -1993,7 +2140,7 @@ class InvitedOrganisationUser(BaseModel): default=INVITE_PENDING, ) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "email_address": self.email_address, @@ -2005,6 +2152,18 @@ def serialize(self): # Service Permissions +PermissionType = Literal[ + "manage_users", + "manage_templates", + "manage_settings", + "send_texts", + "send_emails", + "send_letters", + "manage_api_keys", + "platform_admin", + "view_activity", +] + MANAGE_USERS = "manage_users" MANAGE_TEMPLATES = "manage_templates" MANAGE_SETTINGS = "manage_settings" @@ -2044,7 +2203,7 @@ class Permission(BaseModel): service = db.relationship("Service") user_id = db.Column(UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=False) user = db.relationship("User") - permission = db.Column( + permission: PermissionType = db.Column( db.Enum(*PERMISSION_LIST, name="permission_types"), index=False, unique=False, @@ -2108,13 +2267,13 @@ class InboundSms(BaseModel): @property def content(self): - return encryption.decrypt(self._content) + return signer_inbound_sms.verify(self._content) @content.setter def content(self, content): - self._content = encryption.encrypt(content) + self._content = signer_inbound_sms.sign(content) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "created_at": self.created_at.strftime(DATETIME_FORMAT), @@ -2157,7 +2316,7 @@ class ServiceEmailReplyTo(BaseModel): created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "service_id": str(self.service_id), @@ -2189,7 +2348,7 @@ class ServiceLetterContact(BaseModel): created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) - def serialize(self): + def serialize(self) -> dict: return { 
"id": str(self.id), "service_id": str(self.service_id), @@ -2277,6 +2436,7 @@ class FactNotificationStatus(BaseModel): key_type = db.Column(db.Text, primary_key=True, nullable=False) notification_status = db.Column(db.Text, primary_key=True, nullable=False) notification_count = db.Column(db.Integer(), nullable=False) + billable_units = db.Column(db.Integer(), nullable=False) created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) @@ -2304,7 +2464,7 @@ class Complaint(BaseModel): complaint_date = db.Column(db.DateTime, nullable=True) created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "notification_id": str(self.notification_id), @@ -2336,7 +2496,7 @@ class ServiceDataRetention(BaseModel): __table_args__ = (UniqueConstraint("service_id", "notification_type", name="uix_service_data_retention"),) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "service_id": str(self.service_id), @@ -2366,7 +2526,7 @@ class Fido2Key(BaseModel): created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "user_id": str(self.user_id), @@ -2408,7 +2568,7 @@ class LoginEvent(BaseModel): created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) - def serialize(self): + def serialize(self) -> dict: return { "id": str(self.id), "user_id": str(self.user_id), @@ -2416,3 +2576,9 @@ def serialize(self): "created_at": self.created_at.strftime(DATETIME_FORMAT), "updated_at": self.updated_at.strftime(DATETIME_FORMAT) if self.updated_at else None, } + + +class BounceRateStatus(Enum): + NORMAL = "normal" + WARNING = "warning" + CRITICAL = "critical" diff --git a/app/notifications/callbacks.py b/app/notifications/callbacks.py index 28d6cd944e..3cc6d0bb91 100644 --- a/app/notifications/callbacks.py +++ b/app/notifications/callbacks.py @@ -1,3 +1,5 @@ +from flask import current_app + from app.celery.service_callback_tasks import send_delivery_status_to_service from app.config import QueueNames from app.dao.service_callback_api_dao import ( @@ -6,22 +8,25 @@ def _check_and_queue_callback_task(notification): + if notification is None: + current_app.logger.warning("No notification provided, cannot queue callback task") + return # queue callback task only if the service_callback_api exists service_callback_api = get_service_delivery_status_callback_api_for_service(service_id=notification.service_id) if service_callback_api: notification_data = create_delivery_status_callback_data(notification, service_callback_api) - send_delivery_status_to_service.apply_async([str(notification.id), notification_data], queue=QueueNames.CALLBACKS) def create_delivery_status_callback_data(notification, service_callback_api): - from app import DATETIME_FORMAT, encryption + from app import DATETIME_FORMAT, signer_delivery_status data = { "notification_id": str(notification.id), "notification_client_reference": notification.client_reference, "notification_to": notification.to, "notification_status": notification.status, + "notification_status_description": notification.formatted_status, 
"notification_provider_response": notification.provider_response, "notification_created_at": notification.created_at.strftime(DATETIME_FORMAT), "notification_updated_at": notification.updated_at.strftime(DATETIME_FORMAT) if notification.updated_at else None, @@ -31,11 +36,11 @@ def create_delivery_status_callback_data(notification, service_callback_api): "service_callback_api_bearer_token": service_callback_api.bearer_token, } - return encryption.encrypt(data) + return signer_delivery_status.sign(data) def create_complaint_callback_data(complaint, notification, service_callback_api, recipient): - from app import DATETIME_FORMAT, encryption + from app import DATETIME_FORMAT, signer_complaint data = { "complaint_id": str(complaint.id), @@ -47,4 +52,4 @@ def create_complaint_callback_data(complaint, notification, service_callback_api "service_callback_api_bearer_token": service_callback_api.bearer_token, } - return encryption.encrypt(data) + return signer_complaint.sign(data) diff --git a/app/notifications/notifications_letter_callback.py b/app/notifications/notifications_letter_callback.py index 49c6fb027f..2c841a4402 100644 --- a/app/notifications/notifications_letter_callback.py +++ b/app/notifications/notifications_letter_callback.py @@ -3,11 +3,6 @@ from flask import Blueprint, current_app, jsonify, request -from app.celery.tasks import ( - record_daily_sorted_counts, - update_letter_notifications_statuses, -) -from app.config import QueueNames from app.notifications.utils import autoconfirm_subscription from app.schema_validation import validate from app.v2.errors import register_errors @@ -55,7 +50,5 @@ def process_letter_response(): if filename.lower().endswith("rs.txt") or filename.lower().endswith("rsp.txt"): current_app.logger.info("DVLA callback: Calling task to update letter notifications") - update_letter_notifications_statuses.apply_async([filename], queue=QueueNames.NOTIFY) - record_daily_sorted_counts.apply_async([filename], queue=QueueNames.NOTIFY) return jsonify(result="success", message="DVLA callback succeeded"), 200 diff --git a/app/notifications/notifications_ses_callback.py b/app/notifications/notifications_ses_callback.py index 6d6fb61f96..ea49b23fd9 100644 --- a/app/notifications/notifications_ses_callback.py +++ b/app/notifications/notifications_ses_callback.py @@ -3,11 +3,30 @@ from app.celery.service_callback_tasks import send_complaint_to_service from app.config import QueueNames from app.dao.complaint_dao import save_complaint -from app.dao.notifications_dao import dao_get_notification_history_by_reference +from app.dao.notifications_dao import ( + _update_notification_status, + dao_get_notification_history_by_reference, +) from app.dao.service_callback_api_dao import ( get_service_complaint_callback_api_for_service, ) -from app.models import Complaint +from app.models import ( + NOTIFICATION_HARD_BOUNCE, + NOTIFICATION_HARD_GENERAL, + NOTIFICATION_HARD_NOEMAIL, + NOTIFICATION_HARD_ONACCOUNTSUPPRESSIONLIST, + NOTIFICATION_HARD_SUPPRESSED, + NOTIFICATION_PERMANENT_FAILURE, + NOTIFICATION_SOFT_ATTACHMENTREJECTED, + NOTIFICATION_SOFT_BOUNCE, + NOTIFICATION_SOFT_CONTENTREJECTED, + NOTIFICATION_SOFT_GENERAL, + NOTIFICATION_SOFT_MAILBOXFULL, + NOTIFICATION_SOFT_MESSAGETOOLARGE, + NOTIFICATION_UNKNOWN_BOUNCE, + NOTIFICATION_UNKNOWN_BOUNCE_SUBTYPE, + Complaint, +) from app.notifications.callbacks import create_complaint_callback_data @@ -44,6 +63,55 @@ def _determine_provider_response(ses_message): return None +def _determine_bounce_response(ses_message): + if 
ses_message["notificationType"] != "Bounce": + return None + + bounce_type = ses_message["bounce"].get("bounceType") + bounce_subtype = ses_message["bounce"].get("bounceSubType") + + bounce_response = { + "feedback_type": NOTIFICATION_UNKNOWN_BOUNCE, # default to unknown bounce + "feedback_subtype": NOTIFICATION_UNKNOWN_BOUNCE_SUBTYPE, # default to unknown bounce subtype + "ses_feedback_id": ses_message["bounce"].get("feedbackId"), + "ses_feedback_date": ses_message["bounce"].get("timestamp"), + } + + # See https://docs.aws.amazon.com/ses/latest/dg/notification-contents.html#bounce-types for all bounce types + if bounce_type == "Undetermined": # treat this as a soft bounce since we don't know what went wrong + bounce_response["feedback_type"] = NOTIFICATION_SOFT_BOUNCE + bounce_response["feedback_subtype"] = NOTIFICATION_SOFT_GENERAL + elif bounce_type == "Permanent": + bounce_response["feedback_type"] = NOTIFICATION_HARD_BOUNCE + if bounce_subtype == "General": + bounce_response["feedback_subtype"] = NOTIFICATION_HARD_GENERAL + if bounce_subtype == "NoEmail": + bounce_response["feedback_subtype"] = NOTIFICATION_HARD_NOEMAIL + if bounce_subtype == "Suppressed": + bounce_response["feedback_subtype"] = NOTIFICATION_HARD_SUPPRESSED + if bounce_subtype == "OnAccountSuppressionList": + bounce_response["feedback_subtype"] = NOTIFICATION_HARD_ONACCOUNTSUPPRESSIONLIST + elif bounce_type == "Transient": + bounce_response["feedback_type"] = NOTIFICATION_SOFT_BOUNCE + if bounce_subtype == "General": + bounce_response["feedback_subtype"] = NOTIFICATION_SOFT_GENERAL + if bounce_subtype == "MailboxFull": + bounce_response["feedback_subtype"] = NOTIFICATION_SOFT_MAILBOXFULL + if bounce_subtype == "MessageTooLarge": + bounce_response["feedback_subtype"] = NOTIFICATION_SOFT_MESSAGETOOLARGE + if bounce_subtype == "ContentRejected": + bounce_response["feedback_subtype"] = NOTIFICATION_SOFT_CONTENTREJECTED + if bounce_subtype == "AttachmentRejected": + bounce_response["feedback_subtype"] = NOTIFICATION_SOFT_ATTACHMENTREJECTED + else: + current_app.logger.info( + "Unknown bounce type received. 
SES bounce dict: {}".format( + json.dumps(ses_message).replace("{", "(").replace("}", ")") + ) + ) + return bounce_response + + def get_aws_responses(ses_message): status = _determine_notification_bounce_type(ses_message) @@ -71,7 +139,7 @@ def get_aws_responses(ses_message): }[status] base["provider_response"] = _determine_provider_response(ses_message) - + base["bounce_response"] = _determine_bounce_response(ses_message) return base @@ -94,6 +162,23 @@ def handle_complaint(ses_message): complaint_date=ses_complaint.get("timestamp", None) if ses_complaint else None, ) save_complaint(complaint) + + # if the subtype is onaccountsuppressionlist, update the original notification to be permanent failure + if ses_complaint: + feedback_subtype = ses_complaint.get("complaintSubType", None) + + if feedback_subtype == "OnAccountSuppressionList": + current_app.logger.info( + "Complaint of sub-type 'OnAccountSuppressionList' received; updating notification id {} to permanent-failure".format( + notification.id + ) + ) + _update_notification_status( + notification=notification, + status=NOTIFICATION_PERMANENT_FAILURE, + provider_response="The email address is on the GC Notify suppression list", # TODO: move provider_responses to constants + ) + return complaint, notification, recipient_email diff --git a/app/notifications/process_client_response.py b/app/notifications/process_client_response.py index 6d072e26a9..3c7a8e3b50 100644 --- a/app/notifications/process_client_response.py +++ b/app/notifications/process_client_response.py @@ -51,9 +51,9 @@ def _process_for_status(notification_status, client_name, provider_reference): service_callback_api = get_service_delivery_status_callback_api_for_service(service_id=notification.service_id) # queue callback task only if the service_callback_api exists if service_callback_api: - encrypted_notification = create_delivery_status_callback_data(notification, service_callback_api) + signed_notification = create_delivery_status_callback_data(notification, service_callback_api) send_delivery_status_to_service.apply_async( - [str(notification.id), encrypted_notification], + [str(notification.id), signed_notification], queue=QueueNames.CALLBACKS, ) diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index 54db04e590..d3dc3304c8 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -1,5 +1,6 @@ import uuid from datetime import datetime +from typing import List from flask import current_app from notifications_utils.clients import redis @@ -14,21 +15,29 @@ from app.celery import provider_tasks from app.celery.letters_pdf_tasks import create_letters_pdf from app.config import QueueNames +from app.dao.api_key_dao import update_last_used_api_key from app.dao.notifications_dao import ( + bulk_insert_notifications, dao_create_notification, dao_created_scheduled_notification, dao_delete_notifications_by_id, ) +from app.dao.services_dao import dao_fetch_service_by_id +from app.dao.templates_dao import dao_get_template_by_id from app.models import ( EMAIL_TYPE, KEY_TYPE_TEST, LETTER_TYPE, NOTIFICATION_CREATED, SMS_TYPE, + ApiKeyType, Notification, + NotificationType, ScheduledNotification, + Service, ) -from app.utils import get_template_instance +from app.types import VerifiedNotification +from app.utils import get_delivery_queue_for_template, get_template_instance from app.v2.errors import BadRequestError @@ -39,6 +48,13 @@ def create_content_for_notification(template, 
personalisation): return template_object +def number_of_sms_fragments(template, personalisation): + if template.template_type == "sms": + return create_content_for_notification(template, personalisation).fragment_count + else: + return 0 + + def check_placeholders(template_object): if template_object.missing_data: message = "Missing personalisation for template ID {}: {}".format( @@ -52,11 +68,11 @@ def persist_notification( template_id, template_version, recipient, - service, + service: Service, personalisation, notification_type, api_key_id, - key_type, + key_type: ApiKeyType, created_at=None, job_id=None, job_row_number=None, @@ -70,7 +86,7 @@ def persist_notification( billable_units=None, postage=None, template_postage=None, -): +) -> Notification: notification_created_at = created_at or datetime.utcnow() if not notification_id: notification_id = uuid.uuid4() @@ -94,6 +110,10 @@ def persist_notification( reply_to_text=reply_to_text, billable_units=billable_units, ) + template = dao_get_template_by_id(template_id, template_version, use_cache=True) + notification.queue_name = choose_queue( + notification=notification, research_mode=service.research_mode, queue=get_delivery_queue_for_template(template) + ) if notification_type == SMS_TYPE: formatted_recipient = validate_and_format_phone_number(recipient, international=True) @@ -113,11 +133,130 @@ def persist_notification( if key_type != KEY_TYPE_TEST: if redis_store.get(redis.daily_limit_cache_key(service.id)): redis_store.incr(redis.daily_limit_cache_key(service.id)) - current_app.logger.info("{} {} created at {}".format(notification_type, notification_id, notification_created_at)) + if api_key_id: + update_last_used_api_key(api_key_id, notification_created_at) + return notification + + +def transform_notification( + *, + template_id, + template_version, + recipient, + service, + personalisation, + notification_type, + api_key_id, + key_type, + created_at=None, + job_id=None, + job_row_number=None, + reference=None, + client_reference=None, + notification_id=None, + created_by_id=None, + status=NOTIFICATION_CREATED, + reply_to_text=None, + billable_units=None, + postage=None, + template_postage=None, +) -> Notification: + notification_created_at = created_at or datetime.utcnow() + if not notification_id: + notification_id = uuid.uuid4() + notification = Notification( + id=notification_id, + template_id=template_id, + template_version=template_version, + to=recipient, + service_id=service.id, + personalisation=personalisation, + notification_type=notification_type, + api_key_id=api_key_id, + key_type=key_type, + created_at=notification_created_at, + job_id=job_id, + job_row_number=job_row_number, + client_reference=client_reference, + reference=reference, + created_by_id=created_by_id, + status=status, + reply_to_text=reply_to_text, + billable_units=billable_units, + ) + + if notification_type == SMS_TYPE: + formatted_recipient = validate_and_format_phone_number(recipient, international=True) + recipient_info = get_international_phone_info(formatted_recipient) + notification.normalised_to = formatted_recipient + notification.international = recipient_info.international + notification.phone_prefix = recipient_info.country_prefix + notification.rate_multiplier = recipient_info.billable_units + elif notification_type == EMAIL_TYPE: + notification.normalised_to = format_email_address(notification.to) + notification.international = False + elif notification_type == LETTER_TYPE: + notification.postage = postage or template_postage + return 
notification +def db_save_and_send_notification(notification: Notification): + dao_create_notification(notification) + if notification.key_type != KEY_TYPE_TEST: + service_id = notification.service_id + if redis_store.get(redis.daily_limit_cache_key(service_id)): + redis_store.incr(redis.daily_limit_cache_key(service_id)) + + current_app.logger.info(f"{notification.notification_type} {notification.id} created at {notification.created_at}") + + deliver_task = choose_deliver_task(notification) + try: + deliver_task.apply_async( + [str(notification.id)], + queue=notification.queue_name, + ) + except Exception: + dao_delete_notifications_by_id(notification.id) + raise + current_app.logger.info( + f"{notification.notification_type} {notification.id} sent to the {notification.queue_name} queue for delivery" + ) + + +def choose_queue(notification, research_mode, queue=None) -> QueueNames: + if research_mode or notification.key_type == KEY_TYPE_TEST: + queue = QueueNames.RESEARCH_MODE + + if notification.notification_type == SMS_TYPE: + if notification.sends_with_custom_number(): + queue = QueueNames.SEND_THROTTLED_SMS + if not queue: + queue = QueueNames.SEND_SMS_MEDIUM + if notification.notification_type == EMAIL_TYPE: + if not queue: + queue = QueueNames.SEND_EMAIL_MEDIUM + if notification.notification_type == LETTER_TYPE: + if not queue: + queue = QueueNames.CREATE_LETTERS_PDF + + return queue + + +def choose_deliver_task(notification): + if notification.notification_type == SMS_TYPE: + deliver_task = provider_tasks.deliver_sms + if notification.sends_with_custom_number(): + deliver_task = provider_tasks.deliver_throttled_sms + if notification.notification_type == EMAIL_TYPE: + deliver_task = provider_tasks.deliver_email + if notification.notification_type == LETTER_TYPE: + deliver_task = create_letters_pdf + + return deliver_task + + def send_notification_to_queue(notification, research_mode, queue=None): if research_mode or notification.key_type == KEY_TYPE_TEST: queue = QueueNames.RESEARCH_MODE @@ -127,14 +266,14 @@ def send_notification_to_queue(notification, research_mode, queue=None): if notification.sends_with_custom_number(): deliver_task = provider_tasks.deliver_throttled_sms queue = QueueNames.SEND_THROTTLED_SMS - if not queue: - queue = QueueNames.SEND_SMS + if not queue or queue == QueueNames.NORMAL: + queue = QueueNames.SEND_SMS_MEDIUM if notification.notification_type == EMAIL_TYPE: - if not queue: - queue = QueueNames.SEND_EMAIL + if not queue or queue == QueueNames.NORMAL: + queue = QueueNames.SEND_EMAIL_MEDIUM deliver_task = provider_tasks.deliver_email if notification.notification_type == LETTER_TYPE: - if not queue: + if not queue or queue == QueueNames.NORMAL: queue = QueueNames.CREATE_LETTERS_PDF deliver_task = create_letters_pdf @@ -144,12 +283,97 @@ def send_notification_to_queue(notification, research_mode, queue=None): dao_delete_notifications_by_id(notification.id) raise - current_app.logger.debug( + current_app.logger.info( "{} {} sent to the {} queue for delivery".format(notification.notification_type, notification.id, queue) ) + # TODO: once we've cleaned up all the unused code paths and ensured that this warning never occurs we can delete + # the warning as well as the above calculation of queue. 
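    # Editor's sketch, not part of the change set: how choose_queue above resolves a
    # queue when the caller does not pass one. The stub is a hypothetical stand-in
    # for a Notification; the constant names mirror those used above.
    #
    #     class _StubNotification:
    #         notification_type = SMS_TYPE
    #         key_type = "normal"            # anything other than KEY_TYPE_TEST
    #         def sends_with_custom_number(self):
    #             return False
    #
    #     choose_queue(_StubNotification(), research_mode=False)  # -> QueueNames.SEND_SMS_MEDIUM
    #     choose_queue(_StubNotification(), research_mode=True)   # -> QueueNames.RESEARCH_MODE
    #     # a notification whose sends_with_custom_number() is True lands on SEND_THROTTLED_SMS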
+ if notification.queue_name != queue: + current_app.logger.info( + f"Warning: notification {notification.id} has queue_name {notification.queue_name} but was sent to queue {queue}" + ) + + +def persist_notifications(notifications: List[VerifiedNotification]) -> List[Notification]: + """ + Persist Notifications takes a list of json objects and creates a list of Notifications + that gets bulk inserted into the DB. + """ + + lofnotifications = [] + api_key_last_used = None + + for notification in notifications: + notification_created_at = notification.get("created_at") or datetime.utcnow() + notification_id = notification.get("notification_id", uuid.uuid4()) + notification_recipient = notification.get("recipient") or notification.get("to") + service_id = notification.get("service").id if notification.get("service") else None # type: ignore + # todo: potential bug. notification_obj is being created using some keys that don't exist on notification + # reference, created_by_id, status, billable_units aren't keys on notification at this point + notification_obj = Notification( + id=notification_id, + template_id=notification.get("template_id"), + template_version=notification.get("template_version"), + to=notification_recipient, + service_id=service_id, + personalisation=notification.get("personalisation"), + notification_type=notification.get("notification_type"), + api_key_id=notification.get("api_key_id"), + key_type=notification.get("key_type"), + created_at=notification_created_at, + job_id=notification.get("job_id"), + job_row_number=notification.get("job_row_number"), + client_reference=notification.get("client_reference"), + reference=notification.get("reference"), # type: ignore + created_by_id=notification.get("created_by_id"), # type: ignore + status=notification.get("status"), # type: ignore + reply_to_text=notification.get("reply_to_text"), + billable_units=notification.get("billable_units"), # type: ignore + ) + template = dao_get_template_by_id(notification_obj.template_id, notification_obj.template_version, use_cache=True) + service = dao_fetch_service_by_id(service_id, use_cache=True) + notification_obj.queue_name = choose_queue( + notification=notification_obj, research_mode=service.research_mode, queue=get_delivery_queue_for_template(template) + ) + + if notification.get("notification_type") == SMS_TYPE: + formatted_recipient = validate_and_format_phone_number(notification_recipient, international=True) + recipient_info = get_international_phone_info(formatted_recipient) + notification_obj.normalised_to = formatted_recipient + notification_obj.international = recipient_info.international + notification_obj.phone_prefix = recipient_info.country_prefix + notification_obj.rate_multiplier = recipient_info.billable_units + elif notification.get("notification_type") == EMAIL_TYPE: + notification_obj.normalised_to = format_email_address(notification_recipient) + elif notification.get("notification_type") == LETTER_TYPE: + notification_obj.postage = notification.get("postage") or notification.get("template_postage") # type: ignore + + lofnotifications.append(notification_obj) + if notification.get("key_type") != KEY_TYPE_TEST: + service_id = notification.get("service").id # type: ignore + if redis_store.get(redis.daily_limit_cache_key(service_id)): + redis_store.incr(redis.daily_limit_cache_key(service_id)) + + current_app.logger.info( + "{} {} created at {}".format( + notification.get("notification_type"), + notification.get("notification_id"), + 
notification.get("notification_created_at"), # type: ignore + ) + ) + # If the bulk message is sent using an api key, we want to keep track of the last time the api key was used + # We will only update the api key once + api_key_id = notification.get("api_key_id") + if api_key_id: + api_key_last_used = datetime.utcnow() + if api_key_last_used: + update_last_used_api_key(api_key_id, api_key_last_used) + bulk_insert_notifications(lofnotifications) + + return lofnotifications -def simulated_recipient(to_address, notification_type): +def simulated_recipient(to_address: str, notification_type: NotificationType) -> bool: if notification_type == SMS_TYPE: formatted_simulated_numbers = [ validate_and_format_phone_number(number) for number in current_app.config["SIMULATED_SMS_NUMBERS"] diff --git a/app/notifications/rest.py b/app/notifications/rest.py index e596eec915..e3ec327df6 100644 --- a/app/notifications/rest.py +++ b/app/notifications/rest.py @@ -1,4 +1,5 @@ from flask import Blueprint, current_app, jsonify, request +from marshmallow import ValidationError from notifications_utils import SMS_CHAR_COUNT_LIMIT from notifications_utils.recipients import get_international_phone_info @@ -9,8 +10,11 @@ EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, KEY_TYPE_TEAM, + KEY_TYPE_TEST, LETTER_TYPE, SMS_TYPE, + NotificationType, + Template, ) from app.notifications.process_notifications import ( persist_notification, @@ -18,6 +22,7 @@ simulated_recipient, ) from app.notifications.validators import ( + check_email_daily_limit, check_rate_limiting, check_template_is_active, check_template_is_for_notification_type, @@ -31,6 +36,7 @@ ) from app.service.utils import service_allowed_to_send_to from app.utils import ( + get_delivery_queue_for_template, get_document_url, get_public_notify_type_text, get_template_instance, @@ -47,15 +53,15 @@ def get_notification_by_id(notification_id): notification = notifications_dao.get_notification_with_personalisation( str(authenticated_service.id), notification_id, key_type=None ) - return ( - jsonify(data={"notification": notification_with_personalisation_schema.dump(notification).data}), - 200, - ) + if notification is not None: + return jsonify(data={"notification": notification_with_personalisation_schema.dump(notification)}), 200 + else: + return jsonify(result="error", message="Notification not found in database"), 404 @notifications.route("/notifications", methods=["GET"]) def get_all_notifications(): - data = notifications_filter_schema.load(request.args).data + data = notifications_filter_schema.load(request.args) include_jobs = data.get("include_jobs", False) page = data.get("page", 1) page_size = data.get("page_size", current_app.config.get("API_PAGE_SIZE")) @@ -73,7 +79,7 @@ def get_all_notifications(): ) return ( jsonify( - notifications=notification_with_personalisation_schema.dump(pagination.items, many=True).data, + notifications=notification_with_personalisation_schema.dump(pagination.items, many=True), page_size=page_size, total=pagination.total, links=pagination_links(pagination, ".get_all_notifications", **request.args.to_dict()), @@ -83,26 +89,32 @@ def get_all_notifications(): @notifications.route("/notifications/", methods=["POST"]) -def send_notification(notification_type): - +def send_notification(notification_type: NotificationType): if notification_type not in [SMS_TYPE, EMAIL_TYPE]: msg = "{} notification type is not supported".format(notification_type) msg = msg + ", please use the latest version of the client" if notification_type == LETTER_TYPE else msg 
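A quick aside on the schema changes above (illustrative code, assuming marshmallow 3; the schema below is an example, not one from this repository): dump() now returns the serialized dict directly instead of a result object with a .data attribute, and load() raises ValidationError rather than returning an errors tuple.

    from marshmallow import Schema, ValidationError, fields

    class ExampleFilterSchema(Schema):
        page = fields.Integer()

    schema = ExampleFilterSchema()
    print(schema.dump({"page": 2}))            # {'page': 2} -- no .data attribute
    try:
        schema.load({"page": "not-a-number"})
    except ValidationError as err:
        print(err.messages)                    # {'page': ['Not a valid integer.']}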
raise InvalidRequest(msg, 400) - notification_form, errors = ( - sms_template_notification_schema if notification_type == SMS_TYPE else email_notification_schema - ).load(request.get_json()) - - if errors: + try: + notification_form = ( # type: ignore + sms_template_notification_schema if notification_type == SMS_TYPE else email_notification_schema + ).load(request.get_json()) + except ValidationError as err: + errors = err.messages raise InvalidRequest(errors, status_code=400) + current_app.logger.info(f"POST to V1 API: send_notification, service_id: {authenticated_service.id}") + check_rate_limiting(authenticated_service, api_user) template = templates_dao.dao_get_template_by_id_and_service_id( template_id=notification_form["template"], service_id=authenticated_service.id ) + simulated = simulated_recipient(notification_form["to"], notification_type) + if not simulated != api_user.key_type == KEY_TYPE_TEST and notification_type == EMAIL_TYPE: + check_email_daily_limit(authenticated_service, 1) + check_template_is_for_notification_type(notification_type, template.template_type) check_template_is_active(template) @@ -119,12 +131,11 @@ def send_notification(notification_type): _service_can_send_internationally(authenticated_service, notification_form["to"]) # Do not persist or send notification to the queue if it is a simulated recipient - simulated = simulated_recipient(notification_form["to"], notification_type) notification_model = persist_notification( template_id=template.id, template_version=template.version, template_postage=template.postage, - recipient=request.get_json()["to"], + recipient=request.get_json()["to"], # type: ignore service=authenticated_service, personalisation=notification_form.get("personalisation", None), notification_type=notification_type, @@ -137,7 +148,7 @@ def send_notification(notification_type): send_notification_to_queue( notification=notification_model, research_mode=authenticated_service.research_mode, - queue=template.queue_to_use(), + queue=get_delivery_queue_for_template(template), ) else: current_app.logger.debug("POST simulated notification for id: {}".format(notification_model.id)) @@ -174,7 +185,7 @@ def _service_allowed_to_send_to(notification, service): # FIXME: hard code it for now until we can get en/fr specific links and text if api_user.key_type == KEY_TYPE_TEAM: message = ( - "Can’t send to this recipient using a team-only API key " + f"Can’t send to this recipient using a team-only API key (service {service.id}) " f'- see {get_document_url("en", "keys.html#team-and-safelist")}' ) else: @@ -184,7 +195,7 @@ def _service_allowed_to_send_to(notification, service): raise InvalidRequest({"to": [message]}, status_code=400) -def create_template_object_for_notification(template, personalisation): +def create_template_object_for_notification(template, personalisation) -> Template: template_object = get_template_instance(template.__dict__, personalisation) if template_object.missing_data: diff --git a/app/notifications/validators.py b/app/notifications/validators.py index 0277e3894b..a144c22229 100644 --- a/app/notifications/validators.py +++ b/app/notifications/validators.py @@ -1,12 +1,17 @@ import base64 -from datetime import datetime, timedelta +import functools +from datetime import datetime, time, timedelta from flask import current_app from notifications_utils import SMS_CHAR_COUNT_LIMIT from notifications_utils.clients.redis import ( daily_limit_cache_key, near_daily_limit_cache_key, + near_email_daily_limit_cache_key, + 
near_sms_daily_limit_cache_key, over_daily_limit_cache_key, + over_email_daily_limit_cache_key, + over_sms_daily_limit_cache_key, rate_limit_cache_key, ) from notifications_utils.recipients import ( @@ -22,6 +27,7 @@ from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id from app.dao.service_letter_contact_dao import dao_get_letter_contact_by_id from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id +from app.email_limit_utils import fetch_todays_email_count, increment_todays_email_count from app.models import ( EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, @@ -30,22 +36,48 @@ LETTER_TYPE, SCHEDULE_NOTIFICATIONS, SMS_TYPE, + ApiKey, + ApiKeyType, + NotificationType, + Permission, + Service, + Template, + TemplateType, ) from app.notifications.process_notifications import create_content_for_notification from app.service.sender import send_notification_to_service_users from app.service.utils import service_allowed_to_send_to -from app.utils import get_document_url, get_public_notify_type_text, is_blank +from app.sms_fragment_utils import ( + fetch_todays_requested_sms_count, + increment_todays_requested_sms_count, +) +from app.utils import ( + get_document_url, + get_limit_reset_time_et, + get_public_notify_type_text, + is_blank, +) from app.v2.errors import ( BadRequestError, + LiveServiceTooManyEmailRequestsError, LiveServiceTooManyRequestsError, + LiveServiceTooManySMSRequestsError, RateLimitError, + TrialServiceTooManyEmailRequestsError, TrialServiceTooManyRequestsError, + TrialServiceTooManySMSRequestsError, ) NEAR_DAILY_LIMIT_PERCENTAGE = 80 / 100 -def check_service_over_api_rate_limit(service, api_key): +def check_service_over_api_rate_limit_and_update_rate(service: Service, api_key: ApiKey): + """This function: + - adds the current timestamp to the api rate limit key in Redis + - expires old data from outside the `interval` + - checks if the service is over the api rate limit in the `interval` + - raises an error if the service is over the api rate limit + """ if current_app.config["API_RATE_LIMIT_ENABLED"] and current_app.config["REDIS_ENABLED"]: cache_key = rate_limit_cache_key(service.id, api_key.key_type) rate_limit = service.rate_limit @@ -65,23 +97,145 @@ def check_service_over_api_rate_limit(service, api_key): counter_name="rate_limit.live_service_daily", exception=LiveServiceTooManyRequestsError, ) -def check_service_over_daily_message_limit(key_type, service): +def check_service_over_daily_message_limit(key_type: ApiKeyType, service: Service): if key_type != KEY_TYPE_TEST and current_app.config["REDIS_ENABLED"]: cache_key = daily_limit_cache_key(service.id) messages_sent = redis_store.get(cache_key) if not messages_sent: messages_sent = services_dao.fetch_todays_total_message_count(service.id) - redis_store.set(cache_key, messages_sent, ex=int(timedelta(hours=1).total_seconds())) + redis_store.set(cache_key, messages_sent, ex=int(timedelta(hours=2).total_seconds())) warn_about_daily_message_limit(service, int(messages_sent)) -def check_rate_limiting(service, api_key): - check_service_over_api_rate_limit(service, api_key) - check_service_over_daily_message_limit(api_key.key_type, service) +@statsd_catch( + namespace="validators", + counter_name="rate_limit.trial_service_daily_sms", + exception=TrialServiceTooManySMSRequestsError, +) +@statsd_catch( + namespace="validators", + counter_name="rate_limit.live_service_daily_sms", + exception=LiveServiceTooManySMSRequestsError, +) +def check_sms_daily_limit(service: Service, requested_sms=0): + 
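    # Editor's note: check_sms_daily_limit reads today's requested-SMS count via
    # fetch_todays_requested_sms_count and, if that count plus the batch being
    # requested would exceed service.sms_daily_limit, logs the overage and raises
    # TrialServiceTooManySMSRequestsError (for trial/restricted services) or
    # LiveServiceTooManySMSRequestsError, which the statsd_catch decorators above
    # record under the rate_limit.*_daily_sms counters.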
messages_sent = fetch_todays_requested_sms_count(service.id) + over_sms_daily_limit = (messages_sent + requested_sms) > service.sms_daily_limit + + # Send a warning when reaching the daily message limit + if not over_sms_daily_limit: + return + + current_app.logger.info( + f"service {service.id} is exceeding their daily sms limit [total sent today: {int(messages_sent)} limit: {service.sms_daily_limit}, attempted send: {requested_sms}" + ) + if service.restricted: + raise TrialServiceTooManySMSRequestsError(service.sms_daily_limit) + else: + raise LiveServiceTooManySMSRequestsError(service.sms_daily_limit) + + +@statsd_catch( + namespace="validators", + counter_name="rate_limit.trial_service_daily_email", + exception=TrialServiceTooManyEmailRequestsError, +) +@statsd_catch( + namespace="validators", + counter_name="rate_limit.live_service_daily_email", + exception=LiveServiceTooManyEmailRequestsError, +) +def check_email_daily_limit(service: Service, requested_email=0): + emails_sent_today = fetch_todays_email_count(service.id) + bool_over_email_daily_limit = (emails_sent_today + requested_email) > service.message_limit + + # Send a warning when reaching the daily email limit + if not bool_over_email_daily_limit: + return + + current_app.logger.info( + f"service {service.id} is exceeding their daily email limit [total sent today: {int(emails_sent_today)} limit: {service.message_limit}, attempted send: {requested_email}" + ) + if service.restricted: + raise TrialServiceTooManyEmailRequestsError(service.message_limit) + else: + raise LiveServiceTooManyEmailRequestsError(service.message_limit) + + +def send_warning_email_limit_emails_if_needed(service: Service) -> None: + """ + Function that decides if we should send email warnings about nearing or reaching the email daily limit. 
+ """ + todays_current_email_count = fetch_todays_email_count(service.id) + bool_nearing_email_daily_limit = todays_current_email_count >= NEAR_DAILY_LIMIT_PERCENTAGE * service.message_limit + bool_at_or_over_email_daily_limit = todays_current_email_count >= service.message_limit + current_time = datetime.utcnow().isoformat() + cache_expiration = int(time_until_end_of_day().total_seconds()) + + # Send a warning when reaching 80% of the daily limit + if bool_nearing_email_daily_limit: + cache_key = near_email_daily_limit_cache_key(service.id) + if not redis_store.get(cache_key): + send_near_email_limit_email(service, todays_current_email_count) + redis_store.set(cache_key, current_time, ex=cache_expiration) + + # Send a warning when reaching the daily message limit + if bool_at_or_over_email_daily_limit: + cache_key = over_email_daily_limit_cache_key(service.id) + if not redis_store.get(cache_key): + send_email_limit_reached_email(service) + redis_store.set(cache_key, current_time, ex=cache_expiration) + + +def send_warning_sms_limit_emails_if_needed(service: Service): + todays_requested_sms = fetch_todays_requested_sms_count(service.id) + nearing_sms_daily_limit = todays_requested_sms >= NEAR_DAILY_LIMIT_PERCENTAGE * service.sms_daily_limit + at_or_over_sms_daily_limit = todays_requested_sms >= service.sms_daily_limit + current_time = datetime.utcnow().isoformat() + cache_expiration = int(time_until_end_of_day().total_seconds()) + + # Send a warning when reaching 80% of the daily limit + if nearing_sms_daily_limit: + cache_key = near_sms_daily_limit_cache_key(service.id) + if not redis_store.get(cache_key): + send_near_sms_limit_email(service, todays_requested_sms) + redis_store.set(cache_key, current_time, ex=cache_expiration) + + # Send a warning when reaching the daily message limit + if at_or_over_sms_daily_limit: + cache_key = over_sms_daily_limit_cache_key(service.id) + if not redis_store.get(cache_key): + send_sms_limit_reached_email(service) + redis_store.set(cache_key, current_time, ex=cache_expiration) + + +def time_until_end_of_day() -> timedelta: + """ + Get timedelta until end of day on the datetime passed, or current time. 
+ """ + dt = datetime.now() + tomorrow = dt + timedelta(days=1) + return datetime.combine(tomorrow, time.min) - dt + + +def increment_sms_daily_count_send_warnings_if_needed(service: Service, requested_sms=0) -> None: + if not current_app.config["REDIS_ENABLED"]: + return + + increment_todays_requested_sms_count(service.id, requested_sms) + send_warning_sms_limit_emails_if_needed(service) + + +def increment_email_daily_count_send_warnings_if_needed(service: Service, requested_email=0) -> None: + increment_todays_email_count(service.id, requested_email) + send_warning_email_limit_emails_if_needed(service) + + +def check_rate_limiting(service: Service, api_key: ApiKey): + check_service_over_api_rate_limit_and_update_rate(service, api_key) -def warn_about_daily_message_limit(service, messages_sent): +def warn_about_daily_message_limit(service: Service, messages_sent): nearing_daily_message_limit = messages_sent >= NEAR_DAILY_LIMIT_PERCENTAGE * service.message_limit over_daily_message_limit = messages_sent >= service.message_limit @@ -98,12 +252,16 @@ def warn_about_daily_message_limit(service, messages_sent): template_id=current_app.config["NEAR_DAILY_LIMIT_TEMPLATE_ID"], personalisation={ "service_name": service.name, + "count": messages_sent, "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact", "message_limit_en": "{:,}".format(service.message_limit), "message_limit_fr": "{:,}".format(service.message_limit).replace(",", " "), }, include_user_fields=["name"], ) + current_app.logger.info( + f"service {service.id} is approaching its daily limit, sent {int(messages_sent)} limit {service.message_limit}" + ) # Send a warning when reaching the daily message limit if over_daily_message_limit: @@ -123,9 +281,7 @@ def warn_about_daily_message_limit(service, messages_sent): ) current_app.logger.info( - "service {} has been rate limited for daily use sent {} limit {}".format( - service.id, int(messages_sent), service.message_limit - ) + f"service {service.id} has been rate limited for daily use sent {int(messages_sent)} limit {service.message_limit}" ) if service.restricted: raise TrialServiceTooManyRequestsError(service.message_limit) @@ -133,7 +289,91 @@ def warn_about_daily_message_limit(service, messages_sent): raise LiveServiceTooManyRequestsError(service.message_limit) -def check_template_is_for_notification_type(notification_type, template_type): +def send_near_sms_limit_email(service: Service, sms_sent): + limit_reset_time_et = get_limit_reset_time_et() + sms_remaining = service.sms_daily_limit - sms_sent + send_notification_to_service_users( + service_id=service.id, + template_id=current_app.config["NEAR_DAILY_SMS_LIMIT_TEMPLATE_ID"], + personalisation={ + "service_name": service.name, + "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact", + "count_en": "{:,}".format(sms_sent), + "count_fr": "{:,}".format(sms_sent).replace(",", " "), + "remaining_en": "{:,}".format(sms_remaining), + "remaining_fr": "{:,}".format(sms_remaining).replace(",", " "), + "message_limit_en": "{:,}".format(service.sms_daily_limit), + "message_limit_fr": "{:,}".format(service.sms_daily_limit).replace(",", " "), + "limit_reset_time_et_12hr": limit_reset_time_et["12hr"], + "limit_reset_time_et_24hr": limit_reset_time_et["24hr"], + }, + include_user_fields=["name"], + ) + current_app.logger.info(f"service {service.id} is approaching its daily sms limit of {service.sms_daily_limit}") + + +def send_near_email_limit_email(service: Service, emails_sent) -> None: + """ + Send an email to service users 
when nearing the daily email limit. + + """ + limit_reset_time_et = get_limit_reset_time_et() + emails_remaining = service.message_limit - emails_sent + send_notification_to_service_users( + service_id=service.id, + template_id=current_app.config["NEAR_DAILY_EMAIL_LIMIT_TEMPLATE_ID"], + personalisation={ + "service_name": service.name, + "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact", + "count_en": "{:,}".format(emails_sent), + "count_fr": "{:,}".format(emails_sent).replace(",", " "), + "remaining_en": "{:,}".format(emails_remaining), + "remaining_fr": "{:,}".format(emails_remaining).replace(",", " "), + "message_limit_en": "{:,}".format(service.message_limit), + "message_limit_fr": "{:,}".format(service.message_limit).replace(",", " "), + "limit_reset_time_et_12hr": limit_reset_time_et["12hr"], + "limit_reset_time_et_24hr": limit_reset_time_et["24hr"], + }, + include_user_fields=["name"], + ) + current_app.logger.info(f"service {service.id} is approaching its daily email limit of {service.message_limit}") + + +def send_sms_limit_reached_email(service: Service): + limit_reset_time_et = get_limit_reset_time_et() + send_notification_to_service_users( + service_id=service.id, + template_id=current_app.config["REACHED_DAILY_SMS_LIMIT_TEMPLATE_ID"], + personalisation={ + "service_name": service.name, + "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact", + "message_limit_en": "{:,}".format(service.sms_daily_limit), + "message_limit_fr": "{:,}".format(service.sms_daily_limit).replace(",", " "), + "limit_reset_time_et_12hr": limit_reset_time_et["12hr"], + "limit_reset_time_et_24hr": limit_reset_time_et["24hr"], + }, + include_user_fields=["name"], + ) + + +def send_email_limit_reached_email(service: Service): + limit_reset_time_et = get_limit_reset_time_et() + send_notification_to_service_users( + service_id=service.id, + template_id=current_app.config["REACHED_DAILY_EMAIL_LIMIT_TEMPLATE_ID"], + personalisation={ + "service_name": service.name, + "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact", + "message_limit_en": "{:,}".format(service.message_limit), + "message_limit_fr": "{:,}".format(service.message_limit).replace(",", " "), + "limit_reset_time_et_12hr": limit_reset_time_et["12hr"], + "limit_reset_time_et_24hr": limit_reset_time_et["24hr"], + }, + include_user_fields=["name"], + ) + + +def check_template_is_for_notification_type(notification_type: NotificationType, template_type: TemplateType): if notification_type != template_type: message = "{0} template is not suitable for {1} notification".format(template_type, notification_type) raise BadRequestError(fields=[{"template": message}], message=message) @@ -147,39 +387,41 @@ def check_template_is_active(template): ) -def service_can_send_to_recipient(send_to, key_type, service, allow_safelisted_recipients=True): +def service_can_send_to_recipient(send_to, key_type: ApiKeyType, service: Service, allow_safelisted_recipients=True): if not service_allowed_to_send_to(send_to, service, key_type, allow_safelisted_recipients): # FIXME: hard code it for now until we can get en/fr specific links and text if key_type == KEY_TYPE_TEAM: message = ( - "Can’t send to this recipient using a team-only API key " + f"Can’t send to this recipient using a team-only API key (service {service.id}) " f'- see {get_document_url("en", "keys.html#team-and-safelist")}' ) else: message = ( "Can’t send to this recipient when service is in trial mode " f'– see {get_document_url("en", "keys.html#live")}' ) - raise 
BadRequestError(message=message) + raise BadRequestError(message=message, status_code=400) -def service_has_permission(notify_type, permissions): +def service_has_permission(notify_type, permissions: list[Permission]): return notify_type in [p.permission for p in permissions] -def check_service_has_permission(notify_type, permissions): +def check_service_has_permission(notify_type, permissions: list[Permission]): if not service_has_permission(notify_type, permissions): raise BadRequestError( message="Service is not allowed to send {}".format(get_public_notify_type_text(notify_type, plural=True)) ) -def check_service_can_schedule_notification(permissions, scheduled_for): +def check_service_can_schedule_notification(permissions: list[Permission], scheduled_for): if scheduled_for: if not service_has_permission(SCHEDULE_NOTIFICATIONS, permissions): raise BadRequestError(message="Cannot schedule notifications (this feature is invite-only)") -def validate_and_format_recipient(send_to, key_type, service, notification_type, allow_safelisted_recipients=True): +def validate_and_format_recipient( + send_to, key_type: ApiKeyType, service: Service, notification_type: NotificationType, allow_safelisted_recipients=True +): if send_to is None: raise BadRequestError(message="Recipient can't be empty") @@ -196,8 +438,12 @@ def validate_and_format_recipient(send_to, key_type, service, notification_type, return validate_and_format_email_address(email_address=send_to) -def check_sms_content_char_count(content_count): - if content_count > SMS_CHAR_COUNT_LIMIT: +def check_sms_content_char_count(content_count, service_name, prefix_sms: bool): + content_length = ( + content_count + len(service_name) + 2 if prefix_sms else content_count + ) # the +2 is to account for the ': ' that is added to the service name + + if content_length > SMS_CHAR_COUNT_LIMIT: message = "Content for template has a character count greater than the limit of {}".format(SMS_CHAR_COUNT_LIMIT) raise BadRequestError(message=message) @@ -208,28 +454,28 @@ def check_content_is_not_blank(content): raise BadRequestError(message=message) -def validate_template_exists(template_id, service): +def validate_template_exists(template_id, service: Service): template = check_template_exists_by_id_and_service(template_id, service) check_template_is_active(template) return template -def validate_template(template_id, personalisation, service, notification_type): +def validate_template(template_id, personalisation, service: Service, notification_type: NotificationType): template = check_template_exists_by_id_and_service(template_id, service) check_template_is_for_notification_type(notification_type, template.template_type) check_template_is_active(template) - template_with_content = create_content_for_notification(template, personalisation) + template_with_content: Template = create_content_for_notification(template, personalisation) if template.template_type == SMS_TYPE: - check_sms_content_char_count(template_with_content.content_count) + check_sms_content_char_count(template_with_content.content_count, service.name, service.prefix_sms) check_content_is_not_blank(template_with_content) return template, template_with_content -def check_template_exists_by_id_and_service(template_id, service): +def check_template_exists_by_id_and_service(template_id, service: Service) -> Template: try: return templates_dao.dao_get_template_by_id_and_service_id(template_id=template_id, service_id=service.id) except NoResultFound: @@ -246,7 +492,7 @@ def 
check_reply_to(service_id, reply_to_id, type_): return check_service_letter_contact_id(service_id, reply_to_id, type_) -def check_service_email_reply_to_id(service_id, reply_to_id, notification_type): +def check_service_email_reply_to_id(service_id, reply_to_id, notification_type: NotificationType): if reply_to_id: try: return dao_get_reply_to_by_id(service_id, reply_to_id).email_address @@ -255,7 +501,7 @@ def check_service_email_reply_to_id(service_id, reply_to_id, notification_type): raise BadRequestError(message=message) -def check_service_sms_sender_id(service_id, sms_sender_id, notification_type): +def check_service_sms_sender_id(service_id, sms_sender_id, notification_type: NotificationType): if sms_sender_id: try: return dao_get_service_sms_senders_by_id(service_id, sms_sender_id).sms_sender @@ -264,7 +510,7 @@ def check_service_sms_sender_id(service_id, sms_sender_id, notification_type): raise BadRequestError(message=message) -def check_service_letter_contact_id(service_id, letter_contact_id, notification_type): +def check_service_letter_contact_id(service_id, letter_contact_id, notification_type: NotificationType): if letter_contact_id: try: return dao_get_letter_contact_by_id(service_id, letter_contact_id).contact_block @@ -273,12 +519,68 @@ def check_service_letter_contact_id(service_id, letter_contact_id, notification_ raise BadRequestError(message=message) -def decode_personalisation_files(personalisation_data): +def validate_personalisation_and_decode_files(json_personalisation): + errors = [] + json_personalisation, errors_vars = validate_personalisation_size(json_personalisation) + json_personalisation, errors_num_file = validate_personalisation_num_files(json_personalisation) + json_personalisation, errors_files = decode_personalisation_files(json_personalisation) + errors.extend(errors_vars) + errors.extend(errors_num_file) + errors.extend(errors_files) + return json_personalisation, errors + + +def validate_personalisation_size(json_personalisation): + errors = [] + values = [v for _, v in json_personalisation.items() if not isinstance(v, dict)] + concat_values = functools.reduce(lambda v1, v2: f"{v1}{v2}", values, "") + size_all_values = len(concat_values) + size_limit = current_app.config["PERSONALISATION_SIZE_LIMIT"] + current_app.logger.debug(f"Personalization size of variables detected at {size_all_values} bytes.") + if size_all_values > size_limit: + errors.append( + { + "error": "ValidationError", + "message": f"Personalisation variables size of {size_all_values} bytes is greater than allowed limit of {size_limit} bytes.", + } + ) + + return json_personalisation, errors + + +def validate_personalisation_num_files(json_personalisation): + errors = [] + file_keys = [k for k, v in json_personalisation.items() if isinstance(v, dict) and "file" in v] + files_num = len(file_keys) + num_limit = current_app.config["ATTACHMENT_NUM_LIMIT"] + if files_num > num_limit: + current_app.logger.debug(f"Number of file attachments detected at {files_num}.") + errors.append( + { + "error": "ValidationError", + "message": f"File number exceed allowed limits of {num_limit} with number of {files_num}.", + } + ) + return json_personalisation, errors + + +def decode_personalisation_files(json_personalisation): errors = [] - file_keys = [k for k, v in personalisation_data.items() if isinstance(v, dict) and "file" in v] + file_keys = [k for k, v in json_personalisation.items() if isinstance(v, dict) and "file" in v] for key in file_keys: try: - personalisation_data[key]["file"] = 
base64.b64decode(personalisation_data[key]["file"]) + json_personalisation[key]["file"] = base64.b64decode(json_personalisation[key]["file"]) + personalisation_size = len(json_personalisation[key]["file"]) + current_app.logger.debug(f"File size detected at {personalisation_size} bytes.") + size_limit = current_app.config["ATTACHMENT_SIZE_LIMIT"] + if personalisation_size > size_limit: + filename = json_personalisation[key]["filename"] + errors.append( + { + "error": "ValidationError", + "message": f"{key} : File size for {filename} is {personalisation_size} and greater than allowed limit of {size_limit}.", + } + ) except Exception as e: errors.append( { @@ -286,4 +588,4 @@ def decode_personalisation_files(personalisation_data): "message": f"{key} : {str(e)} : Error decoding base64 field", } ) - return personalisation_data, errors + return json_personalisation, errors diff --git a/app/organisation/invite_rest.py b/app/organisation/invite_rest.py index 31b40bc973..616695d3ae 100644 --- a/app/organisation/invite_rest.py +++ b/app/organisation/invite_rest.py @@ -90,7 +90,6 @@ def invited_org_user_url(invited_org_user_id, invite_link_host=None): token = generate_token( str(invited_org_user_id), current_app.config["SECRET_KEY"], - current_app.config["DANGEROUS_SALT"], ) if invite_link_host is None: diff --git a/app/organisation/rest.py b/app/organisation/rest.py index fa8bc176f0..c952ea7171 100644 --- a/app/organisation/rest.py +++ b/app/organisation/rest.py @@ -13,6 +13,10 @@ dao_get_users_for_organisation, dao_update_organisation, ) +from app.dao.service_data_retention_dao import ( + fetch_service_data_retention_by_notification_type, + insert_service_data_retention, +) from app.dao.services_dao import dao_fetch_service_by_id from app.dao.templates_dao import dao_get_template_by_id from app.errors import InvalidRequest, register_errors @@ -27,6 +31,7 @@ post_update_organisation_schema, ) from app.schema_validation import validate +from app.variables import PT_DATA_RETENTION_DAYS organisation_blueprint = Blueprint("organisation", __name__) register_errors(organisation_blueprint) @@ -61,7 +66,6 @@ def get_organisation_by_id(organisation_id): @organisation_blueprint.route("/by-domain", methods=["GET"]) def get_organisation_by_domain(): - domain = request.args.get("domain") if not domain or "@" in domain: @@ -103,6 +107,18 @@ def update_organisation(organisation_id): raise InvalidRequest("Organisation not found", 404) +def set_pt_data_retention(service_id): + for notification_type in ["email", "sms"]: + data_retention = fetch_service_data_retention_by_notification_type(service_id, notification_type) + + if not data_retention: + insert_service_data_retention( + service_id=service_id, + notification_type=notification_type, + days_of_retention=PT_DATA_RETENTION_DAYS, + ) + + @organisation_blueprint.route("//service", methods=["POST"]) def link_service_to_organisation(organisation_id): data = request.get_json() @@ -112,6 +128,14 @@ def link_service_to_organisation(organisation_id): dao_add_service_to_organisation(service, organisation_id) + # if organisation is P/T, set data retention to 3 days + try: + org = dao_get_organisation_by_id(organisation_id) + if org.organisation_type == "province_or_territory": + set_pt_data_retention(service.id) + except Exception as e: + current_app.logger.error(f"Error setting data retention for service: {service.id}, Error: {e}") + return "", 204 diff --git a/app/provider_details/rest.py b/app/provider_details/rest.py index f8e64429f3..9cc9f714a9 100644 --- 
a/app/provider_details/rest.py +++ b/app/provider_details/rest.py @@ -39,14 +39,14 @@ def get_providers(): @provider_details.route("/", methods=["GET"]) def get_provider_by_id(provider_details_id): - data = provider_details_schema.dump(get_provider_details_by_id(provider_details_id)).data + data = provider_details_schema.dump(get_provider_details_by_id(provider_details_id)) return jsonify(provider_details=data) @provider_details.route("//versions", methods=["GET"]) def get_provider_versions(provider_details_id): versions = dao_get_provider_versions(provider_details_id) - data = provider_details_history_schema.dump(versions, many=True).data + data = provider_details_history_schema.dump(versions, many=True) return jsonify(data=data) @@ -73,4 +73,4 @@ def update_provider_details(provider_details_id): setattr(provider, key, req_json[key]) dao_update_provider_details(provider) - return jsonify(provider_details=provider_details_schema.dump(provider).data), 200 + return jsonify(provider_details=provider_details_schema.dump(provider)), 200 diff --git a/app/queue.py b/app/queue.py new file mode 100644 index 0000000000..ea8113748e --- /dev/null +++ b/app/queue.py @@ -0,0 +1,274 @@ +import random +import string +from abc import ABC, abstractmethod +from enum import Enum +from typing import Any, Dict, Optional +from uuid import UUID, uuid4 + +from flask import current_app +from redis import Redis + +from app.aws.metrics import ( + put_batch_saving_expiry_metric, + put_batch_saving_inflight_metric, + put_batch_saving_inflight_processed, + put_batch_saving_metric, +) +from app.aws.metrics_logger import MetricsLogger + + +def generate_element(length=10) -> str: + elem = "".join(random.choice(string.ascii_lowercase) for i in range(length)) + return elem + + +def generate_elements(count=10) -> list[str]: + return [generate_element(count) for s in range(count)] + + +class Buffer(Enum): + INBOX = "inbox" + IN_FLIGHT = "in-flight" + + def inbox_name(self, suffix=None, process_type=None): + if process_type and suffix: + return f"{self.value}:{suffix}:{process_type}" + if suffix: + return f"{self.value}:{suffix}" + if process_type: + # Added two ":" to keep the same format as suffix:process_type + return f"{self.value}::{str(process_type)}" + return self.value + + def inflight_prefix(self, suffix: Optional[str] = None, process_type: Optional[str] = None) -> str: + if process_type and suffix: + return f"{Buffer.IN_FLIGHT.value}:{str(suffix)}:{str(process_type)}" + if suffix: + return f"{Buffer.IN_FLIGHT.value}:{str(suffix)}" + if process_type: + # Added two ":" to keep the same format as suffix:process_type + return f"{Buffer.IN_FLIGHT.value}::{str(process_type)}" + return f"{Buffer.IN_FLIGHT.value}" + + def inflight_name(self, receipt: UUID = uuid4(), suffix: Optional[str] = None, process_type: Optional[str] = None) -> str: + return f"{self.inflight_prefix(suffix, process_type)}:{str(receipt)}" + + +class Queue(ABC): + """Queue interface for custom buffer. + + Implementations should allow to poll from the queue and acknowledge + read messages once work is done on these. + """ + + @abstractmethod + def poll(self, count=10) -> tuple[UUID, list[str]]: + """Gets messages out of the queue. + + Each polling is associated with a UUID acting as a receipt. This + can later be used in conjunction with the `acknowledge` function + to confirm that the polled messages were properly processed. + This will delete the in-flight messages and these will not get + back into the main inbox. 
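As a usage sketch of the polling contract described above (not part of the patch): messages are published onto the inbox list, poll() moves a batch into a receipt-keyed in-flight list, and acknowledge() deletes that list once the batch has been processed. The Redis URL, the "sms" suffix, the bare MetricsLogger() construction and the message payloads are illustrative assumptions, and the calls need a Flask app context because the queue logs through current_app.

    from redis import Redis

    from app.aws.metrics_logger import MetricsLogger
    from app.queue import RedisQueue

    queue = RedisQueue(suffix="sms", process_type="normal", expire_inflight_after_seconds=300)
    queue.init_app(Redis.from_url("redis://localhost:6379"), MetricsLogger())

    queue.publish('{"notification": "example-1"}')
    queue.publish('{"notification": "example-2"}')

    receipt, messages = queue.poll(count=10)  # batch now sits in an in-flight list
    for message in messages:
        print("processing", message)  # worker-side handling would happen here
    queue.acknowledge(receipt)  # removes the in-flight list so nothing is redelivered

    # If acknowledge() is never called, a periodic expire_inflights() pushes the stale
    # in-flight messages back onto the inbox so they can be polled again.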
Failure to acknowledge the polled + messages will get these back into the inbox after a preconfigured + timeout has passed, ready to be retried. + + Args: + count (int, optional): Number of messages to get out of the queue. Defaults to 10. + + Returns: + tuple[UUID, list[str]]: Gets polling receipt and list of polled notifications. + """ + pass + + @abstractmethod + def acknowledge(self, receipt: UUID): + """Acknowledges reception and processing of the messages tied to the provided receipt. + + Once the acknowledgement is done, the messages will get their in-flight + status removed and will not get served again through the `poll` method. + + Args: + receipt (UUID): Receipt returned by `poll` that identifies the in-flight messages to confirm. + """ + pass + + @abstractmethod + def publish(self, message: str): + """Publishes the message into the buffer queue. + + The message is put onto the back of the queue to be later processed + in a FIFO order. + + Args: + message (str): Message to store into the queue. + """ + pass + + +# TODO: Check if we want to move the queue API and implementations into the utils project. +class RedisQueue(Queue): + """Implementation of a queue using Redis.""" + + LUA_MOVE_TO_INFLIGHT = "move-in-inflight" + LUA_EXPIRE_INFLIGHTS = "expire-inflights" + + scripts: Dict[str, Any] = {} + + def __init__(self, suffix=None, expire_inflight_after_seconds=300, process_type=None) -> None: + """ + Constructor for the Redis Queue + + suffix: str + Suffix can be of type "inbox" or "in-flight". Defines what type of Redis list is created + expire_inflight_after_seconds: int + Seconds indicating how long an in-flight list should be kept around before being sent back to + the inbox + process_type: str + String indicating the priority of the notification. It can be one of "priority", "bulk" or "normal" + + Return: + ------- + RedisQueue + + """ + self._inbox = Buffer.INBOX.inbox_name(suffix, process_type) + self._suffix = suffix + self._process_type = process_type + self._expire_inflight_after_seconds = expire_inflight_after_seconds + + def init_app(self, redis: Redis, metrics_logger: MetricsLogger): + self._redis_client = redis + self.__register_scripts() + self.__metrics_logger = metrics_logger + + def poll(self, count=10) -> tuple[UUID, list[str]]: + receipt = uuid4() + in_flight_key = Buffer.IN_FLIGHT.inflight_name(receipt, self._suffix, self._process_type) + results = self.__move_to_inflight(in_flight_key, count) + if results: + current_app.logger.info(f"Inflight created: {in_flight_key}") + put_batch_saving_inflight_metric(self.__metrics_logger, self, 1) + return (receipt, results) + + def expire_inflights(self): + if self._process_type: + args = [ + f"{Buffer.IN_FLIGHT.inflight_prefix()}:{self._suffix}:{self._process_type}*", + self._inbox, + self._expire_inflight_after_seconds, + ] + else: + args = [f"{Buffer.IN_FLIGHT.inflight_prefix()}:{self._suffix}*", self._inbox, self._expire_inflight_after_seconds] + expired = self.scripts[self.LUA_EXPIRE_INFLIGHTS](args=args) + if expired: + put_batch_saving_expiry_metric(self.__metrics_logger, self, len(expired)) + current_app.logger.warning(f"Moved inflights {expired} back to inbox {self._inbox}") + + def acknowledge(self, receipt: UUID) -> bool: + """ + Remove the in-flight list from Redis + + Args: + receipt: UUID + id of the inflight to remove + + Returns: True if the inflight was found in that queue and removed, False otherwise + """ + inflight_name = Buffer.IN_FLIGHT.inflight_name(receipt, self._suffix, self._process_type) + if not self._redis_client.exists(inflight_name): 
current_app.logger.warning(f"Inflight to delete not found: {inflight_name}") + return False + self._redis_client.delete(inflight_name) + current_app.logger.info(f"Acknowledged inflight: {inflight_name}") + put_batch_saving_inflight_processed(self.__metrics_logger, self, 1) + return True + + def publish(self, message: str): + self._redis_client.rpush(self._inbox, message) + put_batch_saving_metric(self.__metrics_logger, self, 1) + + def __move_to_inflight(self, in_flight_key: str, count: int) -> list[str]: + results = self.scripts[self.LUA_MOVE_TO_INFLIGHT](args=[self._inbox, in_flight_key, count]) + decoded = [result.decode("utf-8") for result in results] + return decoded + + def __register_scripts(self): + self.scripts[self.LUA_MOVE_TO_INFLIGHT] = self._redis_client.register_script( + """ + local DEFAULT_CHUNK = 99 + + local source = ARGV[1] + local destination = ARGV[2] + local source_size = tonumber(redis.call("LLEN", source)) + local count = math.min(source_size, tonumber(ARGV[3])) + + local chunk_size = math.min(math.max(0, count-1), DEFAULT_CHUNK) + local current = 0 + local all = {} + + while current < count do + local elements = redis.call("LRANGE", source, 0, chunk_size) + redis.call("LPUSH", destination, unpack(elements)) + redis.call("LTRIM", source, chunk_size+1, -1) + for i=1,#elements do all[#all+1] = elements[i] end + + current = current + chunk_size+1 + chunk_size = math.min((count-1) - current, DEFAULT_CHUNK) + end + + return all + """ + ) + + self.scripts[self.LUA_EXPIRE_INFLIGHTS] = self._redis_client.register_script( + """ + local DEFAULT_CHUNK = 99 + local inflight_prefix = ARGV[1] + local destination = ARGV[2] + local expire_after = tonumber(ARGV[3]) + + local cursor = "0"; + local expired_inflights = {} + repeat + local scan_result = redis.call("SCAN", cursor, "MATCH", inflight_prefix, "COUNT", 100); + cursor = scan_result[1] + for i, inflight in pairs(scan_result[2]) do + local idle = redis.call("object", "idletime", inflight) + if ( idle > expire_after) then + local count = tonumber(redis.call("LLEN", inflight)) + local chunk_size = math.min(math.max(0, count-1), DEFAULT_CHUNK) + local current = 0 + + while current < count do + local elements = redis.call("LRANGE", inflight, 0, chunk_size) + redis.call("LPUSH", destination, unpack(elements)) + redis.call("LTRIM", inflight, chunk_size+1, -1) + current = current + chunk_size+1 + chunk_size = math.min((count-1) - current, DEFAULT_CHUNK) + end + + expired_inflights[#expired_inflights+1] = inflight + redis.call("del", inflight) + end + end + until cursor == "0"; + return expired_inflights + """ + ) + + +class MockQueue(Queue): + """Implementation of a queue that spits out randomly generated elements. 
+ + Do not use in production!""" + + def poll(self, count=10) -> tuple[UUID, list[str]]: + receipt = uuid4() + return (receipt, generate_elements(count)) + + def acknowledge(self, receipt: UUID): + pass + + def publish(self, message: str): + pass diff --git a/app/schema_validation/__init__.py b/app/schema_validation/__init__.py index 5a4832a038..89641f8e8a 100644 --- a/app/schema_validation/__init__.py +++ b/app/schema_validation/__init__.py @@ -12,7 +12,7 @@ validate_phone_number, ) -from app.notifications.validators import decode_personalisation_files +from app.notifications.validators import validate_personalisation_and_decode_files format_checker = FormatChecker() @@ -87,7 +87,9 @@ def validate(json_to_validate, schema): if errors.__len__() > 0: raise ValidationError(build_error_message(errors)) if json_to_validate.get("personalisation", None): - json_to_validate["personalisation"], errors = decode_personalisation_files(json_to_validate.get("personalisation", {})) + json_to_validate["personalisation"], errors = validate_personalisation_and_decode_files( + json_to_validate.get("personalisation", {}) + ) if errors.__len__() > 0: error_message = json.dumps({"status_code": 400, "errors": errors}) raise ValidationError(error_message) diff --git a/app/schemas.py b/app/schemas.py index c4732c436d..9beae6ab9f 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -1,8 +1,11 @@ from datetime import date, datetime, timedelta +from uuid import UUID +from dateutil.parser import parse from flask import current_app from flask_marshmallow.fields import fields from marshmallow import ( + EXCLUDE, Schema, ValidationError, post_dump, @@ -21,7 +24,7 @@ validate_phone_number, ) -from app import db, ma, models +from app import db, marshmallow, models from app.dao.permissions_dao import permission_dao from app.models import ServicePermission from app.utils import get_template_instance @@ -36,9 +39,10 @@ def _validate_positive_number(value, msg="Not a positive integer"): raise ValidationError(msg) -def _validate_datetime_not_too_far_in_future(dte): +def _validate_datetime_not_too_far_in_future(dte: datetime): max_hours = current_app.config["JOBS_MAX_SCHEDULE_HOURS_AHEAD"] - if dte > datetime.utcnow() + timedelta(hours=max_hours): + max_schedule_time = datetime.utcnow() + timedelta(hours=max_hours) + if dte.timestamp() > max_schedule_time.timestamp(): msg = f"Date cannot be more than {max_hours} hours in the future" raise ValidationError(msg) @@ -48,33 +52,51 @@ def _validate_not_in_future(dte, msg="Date cannot be in the future"): raise ValidationError(msg) -def _validate_not_in_past(dte, msg="Date cannot be in the past"): - if dte < date.today(): +def _validate_datetime_not_in_past(dte: datetime, msg="Date cannot be in the past"): + if dte.timestamp() < datetime.utcnow().timestamp(): raise ValidationError(msg) -def _validate_datetime_not_in_future(dte, msg="Date cannot be in the future"): - if dte > datetime.utcnow(): - raise ValidationError(msg) +class FlexibleDateTime(fields.DateTime): + """ + Allows input data to not contain tz info. 
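A minimal sketch (not part of the patch) of the behaviour this field is aiming for, assuming python-dateutil is installed; the sample timestamp is arbitrary.

    from datetime import datetime
    from dateutil.parser import parse

    # Deserialization is delegated to dateutil.parser.parse, so tz-less input is accepted:
    parse("2023-05-01T14:30:00")  # -> datetime(2023, 5, 1, 14, 30)

    # Serialization reuses the marshmallow 2 output format defined just below:
    datetime(2023, 5, 1, 14, 30).strftime("%Y-%m-%dT%H:%M:%S.%f+00:00")
    # -> '2023-05-01T14:30:00.000000+00:00'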
+ Outputs data using the output format that marshmallow version 2 used to use, OLD_MARSHMALLOW_FORMAT + """ + DEFAULT_FORMAT = "flexible" + # OLD_MARSHMALLOW_FORMAT = "%Y-%m-%dT%H:%M:%S+00:00" + OLD_MARSHMALLOW_FORMAT = "%Y-%m-%dT%H:%M:%S.%f+00:00" -def _validate_datetime_not_in_past(dte, msg="Date cannot be in the past"): - if dte < datetime.utcnow(): - raise ValidationError(msg) + def __init__(self, *args, allow_none=True, **kwargs): + super().__init__(*args, allow_none=allow_none, **kwargs) + self.DESERIALIZATION_FUNCS["flexible"] = parse + self.SERIALIZATION_FUNCS["flexible"] = lambda x: x.strftime(self.OLD_MARSHMALLOW_FORMAT) + + +class UUIDsAsStringsMixin: + @post_dump() + def __post_dump(self, data, **kwargs): + for key, value in data.items(): + if isinstance(value, UUID): + data[key] = str(value) + if isinstance(value, list): + data[key] = [(str(item) if isinstance(item, UUID) else item) for item in value] + return data -class BaseSchema(ma.SQLAlchemyAutoSchema): # type: ignore +class BaseSchema(marshmallow.SQLAlchemyAutoSchema): # type: ignore class Meta: sqla_session = db.session load_instance = True include_relationships = True + unknown = EXCLUDE def __init__(self, load_json=False, *args, **kwargs): self.load_json = load_json super(BaseSchema, self).__init__(*args, **kwargs) @post_load - def make_instance(self, data): + def make_instance(self, data, **kwargs): """Deserialize data to an instance of the model. Update an existing row if specified in `self.instance` or loaded by primary key(s) in the data; else create a new row. @@ -85,12 +107,39 @@ def make_instance(self, data): return super(BaseSchema, self).make_instance(data) -class UserSchema(BaseSchema): +class TemplateCategorySchema(BaseSchema): + class Meta(BaseSchema.Meta): + model = models.TemplateCategory + + @validates("name_en") + def validate_name_en(self, value): + if not value: + raise ValidationError("Invalid name") + + @validates("name_fr") + def validate_name_fr(self, value): + if not value: + raise ValidationError("Invalid name") + + @validates("sms_process_type") + def validate_sms_process_type(self, value): + if value not in models.TEMPLATE_PROCESS_TYPE: + raise ValidationError("Invalid SMS process type") + + @validates("email_process_type") + def validate_email_process_type(self, value): + if value not in models.TEMPLATE_PROCESS_TYPE: + raise ValidationError("Invalid email process type") + +class UserSchema(BaseSchema): permissions = fields.Method("user_permissions", dump_only=True) password_changed_at = field_for(models.User, "password_changed_at", format="%Y-%m-%d %H:%M:%S.%f") created_at = field_for(models.User, "created_at", format="%Y-%m-%d %H:%M:%S.%f") + updated_at = FlexibleDateTime() + logged_in_at = FlexibleDateTime() auth_type = field_for(models.User, "auth_type") + password = fields.String(required=True, load_only=True) def user_permissions(self, usr): retval = {} @@ -104,14 +153,11 @@ def user_permissions(self, usr): class Meta(BaseSchema.Meta): model = models.User exclude = ( - "updated_at", - "created_at", - "user_to_service", - "user_to_organisation", "_password", + "created_at", + "updated_at", "verify_codes", ) - strict = True @validates("name") def validate_name(self, value): @@ -140,19 +186,17 @@ class UserUpdateAttributeSchema(BaseSchema): class Meta(BaseSchema.Meta): model = models.User exclude = ( - "id", - "updated_at", - "created_at", - "user_to_service", "_password", - "verify_codes", + "created_at", + "failed_login_count", + "id", "logged_in_at", "password_changed_at", - 
"failed_login_count", - "state", "platform_admin", + "state", + "updated_at", + "verify_codes", ) - strict = True @validates("name") def validate_name(self, value): @@ -175,7 +219,7 @@ def validate_mobile_number(self, value): raise ValidationError("Invalid phone number: {}".format(error)) @validates_schema(pass_original=True) - def check_unknown_fields(self, data, original_data): + def check_unknown_fields(self, data, original_data, **kwargs): for key in original_data: if key not in self.fields: raise ValidationError("Unknown field name {}".format(key)) @@ -185,10 +229,9 @@ class UserUpdatePasswordSchema(BaseSchema): class Meta(BaseSchema.Meta): model = models.User only = "password" - strict = True @validates_schema(pass_original=True) - def check_unknown_fields(self, data, original_data): + def check_unknown_fields(self, data, original_data, **kwargs): for key in original_data: if key not in self.fields: raise ValidationError("Unknown field name {}".format(key)) @@ -196,41 +239,53 @@ def check_unknown_fields(self, data, original_data): class ProviderDetailsSchema(BaseSchema): created_by = fields.Nested(UserSchema, only=["id", "name", "email_address"], dump_only=True) + updated_at = FlexibleDateTime() class Meta(BaseSchema.Meta): model = models.ProviderDetails - exclude = ("provider_rates", "provider_stats") - strict = True + exclude = ["provider_rates"] class ProviderDetailsHistorySchema(BaseSchema): created_by = fields.Nested(UserSchema, only=["id", "name", "email_address"], dump_only=True) + updated_at = FlexibleDateTime() class Meta(BaseSchema.Meta): model = models.ProviderDetailsHistory - exclude = ("provider_rates", "provider_stats") - strict = True - + # exclude = ("provider_rates", "provider_stats") -class ServiceSchema(BaseSchema): +class ServiceSchema(BaseSchema, UUIDsAsStringsMixin): created_by = field_for(models.Service, "created_by", required=True) organisation_type = field_for(models.Service, "organisation_type") letter_logo_filename = fields.Method(dump_only=True, serialize="get_letter_logo_filename") - permissions = fields.Method("service_permissions") + permissions = fields.Method("serialize_service_permissions", "deserialize_service_permissions") email_branding = field_for(models.Service, "email_branding") default_branding_is_french = field_for(models.Service, "default_branding_is_french") organisation = field_for(models.Service, "organisation") override_flag = False letter_contact_block = fields.Method(serialize="get_letter_contact") go_live_at = field_for(models.Service, "go_live_at", format="%Y-%m-%d %H:%M:%S.%f") + organisation_notes = field_for(models.Service, "organisation_notes") def get_letter_logo_filename(self, service): return service.letter_branding and service.letter_branding.filename - def service_permissions(self, service): + def serialize_service_permissions(self, service): return [p.permission for p in service.permissions] + def deserialize_service_permissions(self, in_data): + if isinstance(in_data, dict) and "permissions" in in_data: + str_permissions = in_data["permissions"] + permissions = [] + for p in str_permissions: + permission = ServicePermission(service_id=in_data["id"], permission=p) + permissions.append(permission) + + in_data["permissions"] = permissions + + return in_data + def get_letter_contact(self, service): return service.get_default_letter_contact() @@ -238,21 +293,16 @@ class Meta(BaseSchema.Meta): model = models.Service dump_only = ["letter_contact_block"] exclude = ( - "updated_at", + "complaints", "created_at", "api_keys", - 
"templates", + "letter_contacts", "jobs", - "old_id", - "template_statistics", - "service_provider_stats", - "service_notification_stats", - "service_sms_senders", "reply_to_email_addresses", - "letter_contacts", - "complaints", + "service_sms_senders", + "templates", + "updated_at", ) - strict = True @validates("permissions") def validate_permissions(self, value): @@ -266,7 +316,7 @@ def validate_permissions(self, value): raise ValidationError("Duplicate Service Permission: {}".format(duplicates)) @pre_load() - def format_for_data_model(self, in_data): + def format_for_data_model(self, in_data, **kwargs): if isinstance(in_data, dict) and "permissions" in in_data: str_permissions = in_data["permissions"] permissions = [] @@ -275,53 +325,56 @@ def format_for_data_model(self, in_data): permissions.append(permission) in_data["permissions"] = permissions + return in_data class DetailedServiceSchema(BaseSchema): statistics = fields.Dict() organisation_type = field_for(models.Service, "organisation_type") + go_live_at = FlexibleDateTime() + created_at = FlexibleDateTime() + updated_at = FlexibleDateTime() class Meta(BaseSchema.Meta): model = models.Service exclude = ( + "all_template_folders", + "annual_billing", "api_keys", - "templates", - "users", "created_by", - "jobs", - "template_statistics", - "service_provider_stats", - "service_notification_stats", "email_branding", - "service_sms_senders", - "monthly_billing", - "reply_to_email_addresses", - "letter_contact_block", - "message_limit", "email_from", "inbound_api", - "safelist", - "reply_to_email_address", - "sms_sender", - "permissions", "inbound_number", "inbound_sms", + "jobs", + "message_limit", + "permissions", + "reply_to_email_addresses", + "safelist", + "service_sms_senders", + "sms_daily_limit", + "templates", + "users", + "version", ) class NotificationModelSchema(BaseSchema): class Meta(BaseSchema.Meta): model = models.Notification - strict = True exclude = ( "_personalisation", + "api_key", "job", "service", "template", - "api_key", ) status = fields.String(required=False) + created_at = FlexibleDateTime() + sent_at = FlexibleDateTime() + updated_at = FlexibleDateTime() class BaseTemplateSchema(BaseSchema): @@ -336,34 +389,47 @@ def get_reply_to_text(self, template): class Meta(BaseSchema.Meta): model = models.Template - exclude = ("service_id", "jobs", "service_letter_contact_id") - strict = True + exclude = ("jobs", "service_id", "service_letter_contact_id") class TemplateSchema(BaseTemplateSchema): - created_by = field_for(models.Template, "created_by", required=True) + is_precompiled_letter = fields.Method("get_is_precompiled_letter") process_type = field_for(models.Template, "process_type") + template_category = fields.Nested(TemplateCategorySchema, dump_only=True) + template_category_id = fields.UUID(required=False, allow_none=True) redact_personalisation = fields.Method("redact") + created_at = FlexibleDateTime() + updated_at = FlexibleDateTime() + + def get_is_precompiled_letter(self, template): + return template.is_precompiled_letter def redact(self, template): return template.redact_personalisation @validates_schema - def validate_type(self, data): + def validate_type(self, data, **kwargs): if data.get("template_type") in [models.EMAIL_TYPE, models.LETTER_TYPE]: subject = data.get("subject") if not subject or subject.strip() == "": raise ValidationError("Invalid template subject", "subject") -class TemplateHistorySchema(BaseSchema): +class ReducedTemplateSchema(TemplateSchema): + class Meta(BaseSchema.Meta): + model 
= models.Template + exclude = ["content", "jobs", "service_id", "service_letter_contact_id"] + +class TemplateHistorySchema(BaseSchema): reply_to = fields.Method("get_reply_to", allow_none=True) reply_to_text = fields.Method("get_reply_to_text", allow_none=True) process_type = field_for(models.Template, "process_type") + template_category = fields.Nested(TemplateCategorySchema, dump_only=True) created_by = fields.Nested(UserSchema, only=["id", "name", "email_address"], dump_only=True) created_at = field_for(models.Template, "created_at", format="%Y-%m-%d %H:%M:%S.%f") + updated_at = FlexibleDateTime() def get_reply_to(self, template): return template.reply_to @@ -376,29 +442,34 @@ class Meta(BaseSchema.Meta): class ApiKeySchema(BaseSchema): - created_by = field_for(models.ApiKey, "created_by", required=True) key_type = field_for(models.ApiKey, "key_type", required=True) + expiry_date = FlexibleDateTime() + created_at = FlexibleDateTime() + updated_at = FlexibleDateTime() class Meta(BaseSchema.Meta): model = models.ApiKey - exclude = ("service", "_secret") - strict = True + exclude = ("_secret", "service") class JobSchema(BaseSchema): created_by_user = fields.Nested( UserSchema, attribute="created_by", - dump_to="created_by", + data_key="created_by", only=["id", "name"], dump_only=True, ) created_by = field_for(models.Job, "created_by", required=True, load_only=True) + created_at = FlexibleDateTime() + updated_at = FlexibleDateTime() + processing_started = FlexibleDateTime() + processing_finished = FlexibleDateTime() api_key_details = fields.Nested( ApiKeySchema, attribute="api_key", - dump_to="api_key", + data_key="api_key", only=["id", "name", "key_type"], dump_only=True, ) @@ -406,11 +477,11 @@ class JobSchema(BaseSchema): job_status = field_for(models.JobStatus, "name", required=False) - scheduled_for = fields.DateTime() + scheduled_for = FlexibleDateTime() service_name = fields.Nested( ServiceSchema, attribute="service", - dump_to="service_name", + data_key="service_name", only=["name"], dump_only=True, ) @@ -425,16 +496,15 @@ class Meta(BaseSchema.Meta): model = models.Job exclude = ( "notifications", - "notifications_sent", "notifications_delivered", "notifications_failed", + "notifications_sent", ) - strict = True class NotificationSchema(Schema): class Meta(BaseSchema.Meta): - strict = True + unknown = EXCLUDE status = fields.String(required=False) personalisation = fields.Dict(required=False) @@ -451,7 +521,7 @@ def validate_to(self, value): raise ValidationError("Invalid phone number: {}".format(error)) @post_load - def format_phone_number(self, item): + def format_phone_number(self, item, **kwargs): item["to"] = validate_and_format_phone_number(item["to"], international=True) return item @@ -490,8 +560,7 @@ class SmsAdminNotificationSchema(SmsNotificationSchema): class NotificationWithTemplateSchema(BaseSchema): class Meta(BaseSchema.Meta): model = models.Notification - strict = True - exclude = ("_personalisation", "scheduled_notification") + # exclude = ("_personalisation", "scheduled_notification") template = fields.Nested( TemplateSchema, @@ -513,9 +582,12 @@ class Meta(BaseSchema.Meta): personalisation = fields.Dict(required=False) key_type = field_for(models.Notification, "key_type", required=True) key_name = fields.String() + created_at = FlexibleDateTime() + updated_at = FlexibleDateTime() + sent_at = FlexibleDateTime() @pre_dump - def add_api_key_name(self, in_data): + def add_api_key_name(self, in_data, **kwargs): if in_data.api_key: in_data.key_name = 
in_data.api_key.name else: @@ -537,34 +609,34 @@ class Meta(NotificationWithTemplateSchema.Meta): # 'body', 'subject' [for emails], and 'content_char_count' fields = ( # db rows + "billable_units", + "created_at", "id", - "to", "job_row_number", - "template_version", - "billable_units", "notification_type", - "created_at", + "reference", "sent_at", "sent_by", - "updated_at", "status", - "reference", + "template_version", + "to", + "updated_at", # computed fields "personalisation", # relationships - "service", - "job", "api_key", + "job", + "service", "template_history", ) @pre_dump - def handle_personalisation_property(self, in_data): + def handle_personalisation_property(self, in_data, **kwargs): self.personalisation = in_data.personalisation return in_data @post_dump - def handle_template_merge(self, in_data): + def handle_template_merge(self, in_data, **kwargs): in_data["template"] = in_data.pop("template_history") template = get_template_instance(in_data["template"], in_data["personalisation"]) in_data["body"] = str(template) @@ -582,10 +654,10 @@ def handle_template_merge(self, in_data): class InvitedUserSchema(BaseSchema): auth_type = field_for(models.InvitedUser, "auth_type") + created_at = FlexibleDateTime() class Meta(BaseSchema.Meta): model = models.InvitedUser - strict = True @validates("email_address") def validate_to(self, value): @@ -597,9 +669,11 @@ def validate_to(self, value): class EmailDataSchema(Schema): class Meta(BaseSchema.Meta): - strict = True + unknown = EXCLUDE email = fields.Str(required=True) + next = fields.Str(required=False) + admin_base_url = fields.Str(required=False) def __init__(self, partial_email=False): super().__init__() @@ -617,7 +691,7 @@ def validate_email(self, value): class SupportEmailDataSchema(Schema): class Meta(BaseSchema.Meta): - strict = True + unknown = EXCLUDE name = fields.Str(required=True) email = fields.Str(required=True) @@ -640,7 +714,7 @@ def validate_email(self, value): class NotificationsFilterSchema(Schema): class Meta(BaseSchema.Meta): - strict = True + unknown = EXCLUDE template_type = fields.Nested(BaseTemplateSchema, only=["template_type"], many=True) status = fields.Nested(NotificationModelSchema, only=["status"], many=True) @@ -656,7 +730,7 @@ class Meta(BaseSchema.Meta): count_pages = fields.Boolean(required=False) @pre_load - def handle_multidict(self, in_data): + def handle_multidict(self, in_data, **kwargs): if isinstance(in_data, dict) and hasattr(in_data, "getlist"): out_data = dict([(k, in_data.get(k)) for k in in_data.keys()]) if "template_type" in in_data: @@ -667,7 +741,7 @@ def handle_multidict(self, in_data): return out_data @post_load - def convert_schema_object_to_field(self, in_data): + def convert_schema_object_to_field(self, in_data, **kwargs): if "template_type" in in_data: in_data["template_type"] = [x.template_type for x in in_data["template_type"]] if "status" in in_data: @@ -684,12 +758,16 @@ def validate_page_size(self, value): class ServiceHistorySchema(Schema): + class Meta: + unknown = EXCLUDE + id = fields.UUID() name = fields.String() - created_at = fields.DateTime() - updated_at = fields.DateTime() + created_at = FlexibleDateTime() + updated_at = FlexibleDateTime() active = fields.Boolean() message_limit = fields.Integer() + sms_daily_limit = fields.Integer() restricted = fields.Boolean() email_from = fields.String() created_by_id = fields.UUID() @@ -697,24 +775,28 @@ class ServiceHistorySchema(Schema): class ApiKeyHistorySchema(Schema): + class Meta: + unknown = EXCLUDE + id = fields.UUID() 
name = fields.String() service_id = fields.UUID() - expiry_date = fields.DateTime() - created_at = fields.DateTime() - updated_at = fields.DateTime() + expiry_date = FlexibleDateTime() + created_at = FlexibleDateTime() + updated_at = FlexibleDateTime() created_by_id = fields.UUID() class EventSchema(BaseSchema): + created_at = FlexibleDateTime() + class Meta(BaseSchema.Meta): model = models.Event - strict = True class DaySchema(Schema): class Meta(BaseSchema.Meta): - strict = True + unknown = EXCLUDE day = fields.Date(required=True) @@ -727,7 +809,7 @@ class UnarchivedTemplateSchema(BaseSchema): archived = fields.Boolean(required=True) @validates_schema - def validate_archived(self, data): + def validate_archived(self, data, **kwargs): if data["archived"]: raise ValidationError("Template has been deleted", "template") @@ -757,6 +839,8 @@ def validate_archived(self, data): service_history_schema = ServiceHistorySchema() api_key_history_schema = ApiKeyHistorySchema() template_history_schema = TemplateHistorySchema() +template_category_schema = TemplateCategorySchema() +reduced_template_schema = ReducedTemplateSchema() event_schema = EventSchema() provider_details_schema = ProviderDetailsSchema() provider_details_history_schema = ProviderDetailsHistorySchema() diff --git a/app/service/rest.py b/app/service/rest.py index 604d0f177a..8ba79097e3 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -5,16 +5,19 @@ from notifications_utils.clients.redis import ( daily_limit_cache_key, near_daily_limit_cache_key, + near_email_daily_limit_cache_key, + near_sms_daily_limit_cache_key, over_daily_limit_cache_key, + over_email_daily_limit_cache_key, + over_sms_daily_limit_cache_key, ) -from notifications_utils.letter_timings import letter_can_be_cancelled from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy import func from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound -from app import redis_store -from app.clients.zendesk_sell import ZenDeskSell +from app import redis_store, salesforce_client +from app.clients.salesforce.salesforce_engagement import ENGAGEMENT_STAGE_LIVE from app.config import QueueNames from app.dao import fact_notification_status_dao, notifications_dao from app.dao.api_key_dao import ( @@ -49,13 +52,6 @@ dao_get_reply_to_by_service_id, update_reply_to_email_address, ) -from app.dao.service_letter_contact_dao import ( - add_letter_contact_for_service, - archive_letter_contact, - dao_get_letter_contact_by_id, - dao_get_letter_contacts_by_service_id, - update_letter_contact, -) from app.dao.service_safelist_dao import ( dao_add_and_commit_safelisted_contacts, dao_fetch_service_safelist, @@ -89,7 +85,6 @@ from app.dao.templates_dao import dao_get_template_by_id from app.dao.users_dao import get_user_by_id from app.errors import InvalidRequest, register_errors -from app.letters.utils import letter_print_day from app.models import ( KEY_TYPE_NORMAL, LETTER_TYPE, @@ -113,10 +108,7 @@ service_schema, ) from app.service import statistics -from app.service.send_notification import ( - send_one_off_notification, - send_pdf_letter_notification, -) +from app.service.send_notification import send_one_off_notification from app.service.sender import send_notification_to_service_users from app.service.service_data_retention_schema import ( add_service_data_retention_request, @@ -124,10 +116,12 @@ ) from app.service.service_senders_schema import ( add_service_email_reply_to_request, - 
add_service_letter_contact_block_request, add_service_sms_sender_request, ) -from app.service.utils import get_safelist_objects +from app.service.utils import ( + get_organisation_id_from_crm_org_notes, + get_safelist_objects, +) from app.user.users_schema import post_set_permissions_schema from app.utils import pagination_links @@ -185,7 +179,7 @@ def get_services(): return result else: services = dao_fetch_all_services(only_active) - data = service_schema.dump(services, many=True).data + data = service_schema.dump(services, many=True) return jsonify(data=data) @@ -202,13 +196,15 @@ def find_services_by_name(): @service_blueprint.route("/live-services-data", methods=["GET"]) def get_live_services_data(): - data = dao_fetch_live_services_data() + filter_heartbeats = request.args.get("filter_heartbeats", None) == "True" + data = dao_fetch_live_services_data(filter_heartbeats=filter_heartbeats) return jsonify(data=data) @service_blueprint.route("/delivered-notifications-stats-by-month-data", methods=["GET"]) def get_delivered_notification_stats_by_month_data(): - return jsonify(data=fetch_delivered_notification_stats_by_month()) + filter_heartbeats = request.args.get("filter_heartbeats", None) == "True" + return jsonify(data=fetch_delivered_notification_stats_by_month(filter_heartbeats=filter_heartbeats)) @service_blueprint.route("/", methods=["GET"]) @@ -218,7 +214,7 @@ def get_service_by_id(service_id): else: fetched = dao_fetch_service_by_id(service_id) - data = service_schema.dump(fetched).data + data = service_schema.dump(fetched) return jsonify(data=data) @@ -236,6 +232,9 @@ def get_service_notification_statistics(service_id): @service_blueprint.route("", methods=["POST"]) def create_service(): data = request.get_json() + data["sms_daily_limit"] = data.get( + "sms_daily_limit", 1000 + ) # TODO this is to support current admin. 
can remove after admin sends an sms_daily_limit if not data.get("user_id"): errors = {"user_id": ["Missing data for required field."]} @@ -250,15 +249,21 @@ def create_service(): # unpack valid json into service object valid_service = Service.from_json(data) - dao_create_service(valid_service, user) + organisation_id = ( + get_organisation_id_from_crm_org_notes(data["organisation_notes"]) + if ("organisation_notes" in data and data["organisation_notes"]) + else None + ) + + dao_create_service(valid_service, user, organisation_id=organisation_id) - try: - # try-catch; just in case, we don't want to error here - ZenDeskSell().send_create_service(valid_service, user) - except Exception as e: - current_app.logger.exception(e) + if current_app.config["FF_SALESFORCE_CONTACT"]: + try: + salesforce_client.engagement_create(valid_service, user) + except Exception as e: + current_app.logger.exception(e) - return jsonify(data=service_schema.dump(valid_service).data), 201 + return jsonify(data=service_schema.dump(valid_service)), 201 @service_blueprint.route("/", methods=["POST"]) @@ -267,12 +272,14 @@ def update_service(service_id): fetched_service = dao_fetch_service_by_id(service_id) # Capture the status change here as Marshmallow changes this later service_going_live = fetched_service.restricted and not req_json.get("restricted", True) + service_name_changed = fetched_service.name != req_json.get("name", fetched_service.name) message_limit_changed = fetched_service.message_limit != req_json.get("message_limit", fetched_service.message_limit) - current_data = dict(service_schema.dump(fetched_service).data.items()) + sms_limit_changed = fetched_service.sms_daily_limit != req_json.get("sms_daily_limit", fetched_service.sms_daily_limit) + current_data = dict(service_schema.dump(fetched_service).items()) current_data.update(request.get_json()) - service = service_schema.load(current_data).data + service = service_schema.load(current_data) if "email_branding" in req_json: email_branding_id = req_json["email_branding"] @@ -287,33 +294,44 @@ def update_service(service_id): redis_store.delete(daily_limit_cache_key(service_id)) redis_store.delete(near_daily_limit_cache_key(service_id)) redis_store.delete(over_daily_limit_cache_key(service_id)) + redis_store.delete(near_email_daily_limit_cache_key(service_id)) + redis_store.delete(over_email_daily_limit_cache_key(service_id)) if not fetched_service.restricted: _warn_service_users_about_message_limit_changed(service_id, current_data) + if sms_limit_changed: + redis_store.delete(near_sms_daily_limit_cache_key(service_id)) + redis_store.delete(over_sms_daily_limit_cache_key(service_id)) + if not fetched_service.restricted: + _warn_service_users_about_sms_limit_changed(service_id, current_data) if service_going_live: _warn_services_users_about_going_live(service_id, current_data) + if current_app.config["FF_SALESFORCE_CONTACT"]: try: - # Two scenarios, if there is a user that has requested to go live, we will use that user - # to create a user-service/contact-deal pair between notify and zendesk sell - # If by any chance there is no tracked request to a user, notify will try to identify the user - # that created the service and then create a user-service/contact-deal relationship - if service.go_live_user_id: - user = get_user_by_id(service.go_live_user_id) - else: + if service_going_live: + # Two scenarios: if there is a user that has requested to go live, we will use that user + # to create a Contact/Engagement pair between Notify and Salesforce. 
+ If by any chance there is no tracked request to a user, Notify will try to identify the user + that created the service and then create a Contact/Engagement relationship. + if service.go_live_user_id: + user = get_user_by_id(service.go_live_user_id) + else: + user = dao_fetch_service_creator(service.id) + salesforce_client.engagement_update(service, user, {"StageName": ENGAGEMENT_STAGE_LIVE}) + elif service_name_changed: user = dao_fetch_service_creator(service.id) - - ZenDeskSell().send_go_live_service(service, user) + salesforce_client.engagement_update(service, user, {"Name": service.name}) except Exception as e: current_app.logger.exception(e) - return jsonify(data=service_schema.dump(fetched_service).data), 200 + return jsonify(data=service_schema.dump(fetched_service)), 200 def _warn_service_users_about_message_limit_changed(service_id, data): send_notification_to_service_users( service_id=service_id, - template_id=current_app.config["DAILY_LIMIT_UPDATED_TEMPLATE_ID"], + template_id=current_app.config["DAILY_EMAIL_LIMIT_UPDATED_TEMPLATE_ID"], personalisation={ "service_name": data["name"], "message_limit_en": "{:,}".format(data["message_limit"]), @@ -323,6 +341,19 @@ def _warn_service_users_about_message_limit_changed(service_id, data): ) +def _warn_service_users_about_sms_limit_changed(service_id, data): + send_notification_to_service_users( + service_id=service_id, + template_id=current_app.config["DAILY_SMS_LIMIT_UPDATED_TEMPLATE_ID"], + personalisation={ + "service_name": data["name"], + "message_limit_en": "{:,}".format(data["sms_daily_limit"]), + "message_limit_fr": "{:,}".format(data["sms_daily_limit"]).replace(",", " "), + }, + include_user_fields=["name"], + ) + + def _warn_services_users_about_going_live(service_id, data): send_notification_to_service_users( service_id=service_id, @@ -341,11 +372,15 @@ def _warn_services_users_about_going_live(service_id, data): @service_blueprint.route("//api-key", methods=["POST"]) def create_api_key(service_id=None): fetched_service = dao_fetch_service_by_id(service_id=service_id) - valid_api_key = api_key_schema.load(request.get_json()).data + valid_api_key = api_key_schema.load(request.get_json()) valid_api_key.service = fetched_service save_model_api_key(valid_api_key) unsigned_api_key = get_unsigned_secret(valid_api_key.id) - return jsonify(data=unsigned_api_key), 201 + + # prefix the API key so the keys can be easily identified for security scanning + keydata = {"key": unsigned_api_key, "key_name": current_app.config["API_KEY_PREFIX"] + valid_api_key.name} + + return jsonify(data=keydata), 201 @service_blueprint.route("//api-key/revoke/", methods=["POST"]) @@ -368,7 +403,7 @@ def get_api_keys(service_id, key_id=None): error = "API key not found for id: {}".format(service_id) raise InvalidRequest(error, status_code=404) - return jsonify(apiKeys=api_key_schema.dump(api_keys, many=True).data), 200 + return jsonify(apiKeys=api_key_schema.dump(api_keys, many=True)), 200 @service_blueprint.route("//users", methods=["GET"]) @@ -393,7 +428,14 @@ def add_user_to_service(service_id, user_id): folder_permissions = data.get("folder_permissions", []) dao_add_user_to_service(service, user, permissions, folder_permissions) - data = service_schema.dump(service).data + data = service_schema.dump(service) + + if current_app.config["FF_SALESFORCE_CONTACT"]: + try: + salesforce_client.engagement_add_contact_role(service, user) + except Exception as e: + current_app.logger.exception(e) + return jsonify(data=data), 201 @@ -410,6 +452,13 @@ def 
remove_user_from_service(service_id, user_id): raise InvalidRequest(error, status_code=400) dao_remove_user_from_service(service, user) + + if current_app.config["FF_SALESFORCE_CONTACT"]: + try: + salesforce_client.engagement_delete_contact_role(service, user) + except Exception as e: + current_app.logger.exception(e) + return jsonify({}), 204 @@ -426,12 +475,12 @@ def get_service_history(service_id): ) service_history = Service.get_history_model().query.filter_by(id=service_id).all() - service_data = service_history_schema.dump(service_history, many=True).data + service_data = service_history_schema.dump(service_history, many=True) api_key_history = ApiKey.get_history_model().query.filter_by(service_id=service_id).all() - api_keys_data = api_key_history_schema.dump(api_key_history, many=True).data + api_keys_data = api_key_history_schema.dump(api_key_history, many=True) template_history = TemplateHistory.query.filter_by(service_id=service_id).all() - template_data, errors = template_history_schema.dump(template_history, many=True) + template_data = template_history_schema.dump(template_history, many=True) data = { "service_history": service_data, @@ -445,7 +494,7 @@ def get_service_history(service_id): @service_blueprint.route("//notifications", methods=["GET"]) def get_all_notifications_for_service(service_id): - data = notifications_filter_schema.load(request.args).data + data = notifications_filter_schema.load(request.args) if data.get("to"): notification_type = data.get("template_type")[0] if data.get("template_type") else None return search_for_notification_by_to_field( @@ -481,7 +530,7 @@ def get_all_notifications_for_service(service_id): if data.get("format_for_csv"): notifications = [notification.serialize_for_csv() for notification in pagination.items] else: - notifications = notification_with_template_schema.dump(pagination.items, many=True).data + notifications = notification_with_template_schema.dump(pagination.items, many=True) return ( jsonify( notifications=notifications, @@ -495,18 +544,11 @@ def get_all_notifications_for_service(service_id): @service_blueprint.route("//notifications/", methods=["GET"]) def get_notification_for_service(service_id, notification_id): - - notification = notifications_dao.get_notification_with_personalisation( - service_id, - notification_id, - key_type=None, - ) - return ( - jsonify( - notification_with_template_schema.dump(notification).data, - ), - 200, - ) + notification = notifications_dao.get_notification_with_personalisation(service_id, notification_id, key_type=None) + if notification is not None: + return jsonify(notification_with_template_schema.dump(notification)), 200 + else: + return jsonify(result="error", message="Notification not found in database"), 404 @service_blueprint.route("//notifications//cancel", methods=["POST"]) @@ -520,13 +562,6 @@ def cancel_notification_for_service(service_id, notification_id): "Notification cannot be cancelled - only letters can be cancelled", status_code=400, ) - elif not letter_can_be_cancelled(notification.status, notification.created_at): - print_day = letter_print_day(notification.created_at) - - raise InvalidRequest( - "It’s too late to cancel this letter. 
Printing started {} at 5.30pm".format(print_day), - status_code=400, - ) updated_notification = notifications_dao.update_notification_status_by_id( notification_id, @@ -534,7 +569,7 @@ def cancel_notification_for_service(service_id, notification_id): ) return ( - jsonify(notification_with_template_schema.dump(updated_notification).data), + jsonify(notification_with_template_schema.dump(updated_notification)), 200, ) @@ -547,7 +582,7 @@ def search_for_notification_by_to_field(service_id, search_term, statuses, notif notification_type=notification_type, ) return ( - jsonify(notifications=notification_with_template_schema.dump(results, many=True).data), + jsonify(notifications=notification_with_template_schema.dump(results, many=True)), 200, ) @@ -581,7 +616,7 @@ def get_detailed_service(service_id, today_only=False): service = dao_fetch_service_by_id(service_id) service.statistics = get_service_statistics(service_id, today_only) - return detailed_service_schema.dump(service).data + return detailed_service_schema.dump(service) def get_service_statistics(service_id, today_only, limit_days=7): @@ -598,7 +633,6 @@ def get_detailed_services(start_date, end_date, only_active=False, include_from_ if start_date == datetime.utcnow().date(): stats = dao_fetch_todays_stats_for_all_services(include_from_test_key=include_from_test_key, only_active=only_active) else: - stats = fetch_stats_for_all_services_by_date_range( start_date=start_date, end_date=end_date, @@ -667,6 +701,11 @@ def archive_service(service_id): if service.active: dao_archive_service(service.id) + if current_app.config["FF_SALESFORCE_CONTACT"]: + try: + salesforce_client.engagement_close(service) + except Exception as e: + current_app.logger.exception(e) return "", 204 @@ -734,8 +773,7 @@ def create_one_off_notification(service_id): @service_blueprint.route("//send-pdf-letter", methods=["POST"]) def create_pdf_letter(service_id): - resp = send_pdf_letter_notification(service_id, request.get_json()) - return jsonify(resp), 201 + pass @service_blueprint.route("//email-reply-to", methods=["GET"]) @@ -752,7 +790,7 @@ def get_email_reply_to_address(service_id, reply_to_id): @service_blueprint.route("//email-reply-to/verify", methods=["POST"]) def verify_reply_to_email_address(service_id): - email_address, errors = email_data_request_schema.load(request.get_json()) + email_address = email_data_request_schema.load(request.get_json()) check_if_reply_to_address_already_in_use(service_id, email_address["email"]) template = dao_get_template_by_id(current_app.config["REPLY_TO_EMAIL_ADDRESS_VERIFICATION_TEMPLATE_ID"]) notify_service = Service.query.get(current_app.config["NOTIFY_SERVICE_ID"]) @@ -813,41 +851,22 @@ def delete_service_reply_to_email_address(service_id, reply_to_email_id): @service_blueprint.route("//letter-contact", methods=["GET"]) def get_letter_contacts(service_id): - result = dao_get_letter_contacts_by_service_id(service_id) - return jsonify([i.serialize() for i in result]), 200 + pass @service_blueprint.route("//letter-contact/", methods=["GET"]) def get_letter_contact_by_id(service_id, letter_contact_id): - result = dao_get_letter_contact_by_id(service_id=service_id, letter_contact_id=letter_contact_id) - return jsonify(result.serialize()), 200 + pass @service_blueprint.route("//letter-contact", methods=["POST"]) def add_service_letter_contact(service_id): - # validate the service exists, throws ResultNotFound exception. 
- dao_fetch_service_by_id(service_id) - form = validate(request.get_json(), add_service_letter_contact_block_request) - new_letter_contact = add_letter_contact_for_service( - service_id=service_id, - contact_block=form["contact_block"], - is_default=form.get("is_default", True), - ) - return jsonify(data=new_letter_contact.serialize()), 201 + pass @service_blueprint.route("//letter-contact/", methods=["POST"]) def update_service_letter_contact(service_id, letter_contact_id): - # validate the service exists, throws ResultNotFound exception. - dao_fetch_service_by_id(service_id) - form = validate(request.get_json(), add_service_letter_contact_block_request) - new_reply_to = update_letter_contact( - service_id=service_id, - letter_contact_id=letter_contact_id, - contact_block=form["contact_block"], - is_default=form.get("is_default", True), - ) - return jsonify(data=new_reply_to.serialize()), 200 + pass @service_blueprint.route( @@ -855,9 +874,7 @@ def update_service_letter_contact(service_id, letter_contact_id): methods=["POST"], ) def delete_service_letter_contact(service_id, letter_contact_id): - archived_letter_contact = archive_letter_contact(service_id, letter_contact_id) - - return jsonify(data=archived_letter_contact.serialize()), 200 + pass @service_blueprint.route("//sms-sender", methods=["POST"]) diff --git a/app/service/send_notification.py b/app/service/send_notification.py index f71eb4e7d2..77cd1313e3 100644 --- a/app/service/send_notification.py +++ b/app/service/send_notification.py @@ -4,6 +4,7 @@ from sqlalchemy.orm.exc import NoResultFound from app import create_random_identifier +from app.config import Priorities, QueueNames from app.dao.notifications_dao import _update_notification_status from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id @@ -29,13 +30,19 @@ from app.notifications.process_notifications import ( persist_notification, send_notification_to_queue, + simulated_recipient, ) from app.notifications.validators import ( + check_email_daily_limit, check_service_has_permission, check_service_over_daily_message_limit, + check_sms_daily_limit, + increment_email_daily_count_send_warnings_if_needed, + increment_sms_daily_count_send_warnings_if_needed, validate_and_format_recipient, validate_template, ) +from app.utils import get_delivery_queue_for_template from app.v2.errors import BadRequestError @@ -58,9 +65,14 @@ def send_one_off_notification(service_id, post_data): personalisation = post_data.get("personalisation", None) - validate_template(template.id, personalisation, service, template.template_type) + _, template_with_content = validate_template(template.id, personalisation, service, template.template_type) - check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service) + if template.template_type == SMS_TYPE: + is_test_notification = simulated_recipient(post_data["to"], template.template_type) + if not is_test_notification: + check_sms_daily_limit(service, 1) + elif template.template_type == EMAIL_TYPE: + check_email_daily_limit(service, 1) # 1 email validate_and_format_recipient( send_to=post_data["to"], @@ -72,6 +84,13 @@ def send_one_off_notification(service_id, post_data): validate_created_by(service, post_data["created_by"]) + if template.template_type == SMS_TYPE: + is_test_notification = simulated_recipient(post_data["to"], template.template_type) + if not is_test_notification: + increment_sms_daily_count_send_warnings_if_needed(service, 1) + elif 
template.template_type == EMAIL_TYPE: + increment_email_daily_count_send_warnings_if_needed(service, 1) # 1 email + sender_id = post_data.get("sender_id", None) reply_to = get_reply_to_text( notification_type=template.template_type, @@ -100,10 +119,15 @@ def send_one_off_notification(service_id, post_data): NOTIFICATION_DELIVERED, ) else: + # allow one-off sends from admin to go quicker by using normal queue instead of bulk queue + queue = get_delivery_queue_for_template(template) + if queue == QueueNames.DELIVERY_QUEUES[template.template_type][Priorities.LOW]: + queue = QueueNames.DELIVERY_QUEUES[template.template_type][Priorities.MEDIUM] + send_notification_to_queue( notification=notification, research_mode=service.research_mode, - queue=template.queue_to_use(), + queue=queue, ) return {"id": str(notification.id)} diff --git a/app/service/utils.py b/app/service/utils.py index 1b7cc2f377..45cf4ed444 100644 --- a/app/service/utils.py +++ b/app/service/utils.py @@ -1,7 +1,14 @@ import itertools +import json +from typing import Optional +import requests +from flask import current_app from notifications_utils.recipients import allowed_to_send_to +from sqlalchemy.orm.exc import NoResultFound +from app.dao.organisation_dao import dao_get_organisation_by_id +from app.dao.service_data_retention_dao import insert_service_data_retention from app.models import ( EMAIL_TYPE, KEY_TYPE_NORMAL, @@ -10,6 +17,7 @@ MOBILE_TYPE, ServiceSafelist, ) +from app.variables import PT_DATA_RETENTION_DAYS def get_recipients_from_request(request_json, key, type): @@ -27,13 +35,18 @@ def get_safelist_objects(service_id, request_json): def service_allowed_to_send_to(recipient, service, key_type, allow_safelisted_recipients=True): - members = safelisted_members(service, key_type, allow_safelisted_recipients) + is_simulated = False + if recipient in current_app.config["SIMULATED_EMAIL_ADDRESSES"] or recipient in current_app.config["SIMULATED_SMS_NUMBERS"]: + is_simulated = True + + members = safelisted_members(service, key_type, is_simulated, allow_safelisted_recipients) if members is None: return True + return allowed_to_send_to(recipient, members) -def safelisted_members(service, key_type, allow_safelisted_recipients=True): +def safelisted_members(service, key_type, is_simulated=False, allow_safelisted_recipients=True): if key_type == KEY_TYPE_TEST: return None @@ -41,7 +54,70 @@ def safelisted_members(service, key_type, allow_safelisted_recipients=True): return None team_members = itertools.chain.from_iterable([user.mobile_number, user.email_address] for user in service.users) - safelist_members = [member.recipient for member in service.safelist if allow_safelisted_recipients] + safelist_members = [] + + if is_simulated: + safelist_members = itertools.chain.from_iterable( + [current_app.config["SIMULATED_SMS_NUMBERS"], current_app.config["SIMULATED_EMAIL_ADDRESSES"]] + ) + else: + safelist_members = [member.recipient for member in service.safelist if allow_safelisted_recipients] if (key_type == KEY_TYPE_NORMAL and service.restricted) or (key_type == KEY_TYPE_TEAM): return itertools.chain(team_members, safelist_members) + + +def get_gc_organisation_data() -> list[dict]: + "Returns the dataset from the gc-organisations repo" + response = requests.get( + current_app.config["CRM_ORG_LIST_URL"], + headers={"Authorization": f'token {current_app.config["CRM_GITHUB_PERSONAL_ACCESS_TOKEN"]}'}, + ) + response.raise_for_status() + + account_data = json.loads(response.text) + return account_data + + +def 
get_organisation_id_from_crm_org_notes(org_notes: str) -> Optional[str]: + """Returns the notify_organisation_id if one exists for the organisation name + in the org_notes string + """ + if ">" not in org_notes: + return None + + # this is like: "Department of Silly Walks > Unit 2" + organisation_name = org_notes.split(">")[0].strip() + + gc_org_data = get_gc_organisation_data() + + # create 2 dicts that map english and french org names to the notify organisation_id + en_dict = {} + fr_dict = {} + for item in gc_org_data: + en_dict[item["name_eng"]] = item["notify_organisation_id"] + fr_dict[item["name_fra"]] = item["notify_organisation_id"] + + # find the org name in the list + if organisation_name in en_dict: + return en_dict[organisation_name] + if organisation_name in fr_dict: + return fr_dict[organisation_name] + return None + + +def add_pt_data_retention(service_id): + try: + insert_service_data_retention(service_id, "email", PT_DATA_RETENTION_DAYS) + insert_service_data_retention(service_id, "sms", PT_DATA_RETENTION_DAYS) + except Exception as e: + current_app.logger.error(f"Error setting data retention for service: {service_id}, Error: {e}") + + +def get_organisation_by_id(organisation_id): + try: + organisation = dao_get_organisation_by_id(organisation_id) + except NoResultFound: + current_app.logger.warning(f"Could not find organisation with id {organisation_id}") + return None + return organisation diff --git a/app/sms_fragment_utils.py b/app/sms_fragment_utils.py new file mode 100644 index 0000000000..0b7bdb152f --- /dev/null +++ b/app/sms_fragment_utils.py @@ -0,0 +1,29 @@ +from datetime import timedelta +from uuid import UUID + +from flask import current_app +from notifications_utils.clients.redis import sms_daily_count_cache_key + +from app import redis_store +from app.dao.services_dao import fetch_todays_total_sms_count + + +def fetch_todays_requested_sms_count(service_id: UUID) -> int: + if not current_app.config["REDIS_ENABLED"]: + return fetch_todays_total_sms_count(service_id) + + cache_key = sms_daily_count_cache_key(service_id) + sms_count = redis_store.get(cache_key) + if sms_count is None: + sms_count = fetch_todays_total_sms_count(service_id) + redis_store.set(cache_key, sms_count, ex=int(timedelta(hours=2).total_seconds())) + return int(sms_count) + + +def increment_todays_requested_sms_count(service_id: UUID, increment_by: int): + if not current_app.config["REDIS_ENABLED"]: + return + + fetch_todays_requested_sms_count(service_id) # to make sure it's set in redis + cache_key = sms_daily_count_cache_key(service_id) + redis_store.incrby(cache_key, increment_by) diff --git a/app/status/healthcheck.py b/app/status/healthcheck.py index c63de46ae9..3d73cdc297 100644 --- a/app/status/healthcheck.py +++ b/app/status/healthcheck.py @@ -1,3 +1,5 @@ +from datetime import datetime, timezone + from flask import Blueprint, jsonify, request from app import db, version @@ -15,6 +17,7 @@ def show_status(): else: return ( jsonify( + current_time_utc=str(str(datetime.now(timezone.utc))), status="ok", # This should be considered part of the public API commit_sha=version.__commit_sha__, build_time=version.__time__, diff --git a/app/template/rest.py b/app/template/rest.py index ff5a2bbd3e..789ffbefa3 100644 --- a/app/template/rest.py +++ b/app/template/rest.py @@ -3,9 +3,13 @@ import botocore from flask import Blueprint, current_app, jsonify, request -from notifications_utils import SMS_CHAR_COUNT_LIMIT +from notifications_utils import ( + EMAIL_CHAR_COUNT_LIMIT, + 
SMS_CHAR_COUNT_LIMIT, + TEMPLATE_NAME_CHAR_COUNT_LIMIT, +) from notifications_utils.pdf import extract_page_from_pdf -from notifications_utils.template import SMSMessageTemplate +from notifications_utils.template import HTMLEmailTemplate, SMSMessageTemplate from PyPDF2.utils import PdfReadError from requests import post as requests_post from sqlalchemy.orm.exc import NoResultFound @@ -21,15 +25,28 @@ dao_get_template_versions, dao_redact_template, dao_update_template, + dao_update_template_category, + dao_update_template_process_type, dao_update_template_reply_to, get_precompiled_letter_template, ) from app.errors import InvalidRequest, register_errors from app.letters.utils import get_letter_pdf -from app.models import LETTER_TYPE, SECOND_CLASS, SMS_TYPE, Template +from app.models import ( + EMAIL_TYPE, + LETTER_TYPE, + SECOND_CLASS, + SMS_TYPE, + Organisation, + Template, +) from app.notifications.validators import check_reply_to, service_has_permission from app.schema_validation import validate -from app.schemas import template_history_schema, template_schema +from app.schemas import ( + reduced_template_schema, + template_history_schema, + template_schema, +) from app.template.template_schemas import post_create_template_schema from app.utils import get_public_notify_type_text, get_template_instance @@ -39,10 +56,19 @@ def _content_count_greater_than_limit(content, template_type): - if template_type != SMS_TYPE: - return False - template = SMSMessageTemplate({"content": content, "template_type": template_type}) - return template.content_count > SMS_CHAR_COUNT_LIMIT + if template_type == EMAIL_TYPE: + template = HTMLEmailTemplate({"content": content, "subject": "placeholder", "template_type": template_type}) + return template.is_message_too_long() + if template_type == SMS_TYPE: + template = SMSMessageTemplate({"content": content, "template_type": template_type}) + return template.is_message_too_long() + return False + + +def _template_name_over_char_limit(name, content, template_type): + return HTMLEmailTemplate( + {"name": name, "content": content, "subject": "placeholder", "template_type": template_type} + ).is_name_too_long() def validate_parent_folder(template_json): @@ -58,11 +84,20 @@ def validate_parent_folder(template_json): return None +def should_template_be_redacted(organisation: Organisation) -> bool: + try: + return organisation.organisation_type == "province_or_territory" + except AttributeError: + current_app.logger.info("Service has no linked organisation") + return False + + @template_blueprint.route("", methods=["POST"]) def create_template(service_id): fetched_service = dao_fetch_service_by_id(service_id=service_id) # permissions needs to be placed here otherwise marshmallow will interfere with versioning permissions = fetched_service.permissions + organisation = fetched_service.organisation template_json = validate(request.get_json(), post_create_template_schema) folder = validate_parent_folder(template_json=template_json) new_template = Template.from_json(template_json, folder) @@ -79,15 +114,46 @@ def create_template(service_id): over_limit = _content_count_greater_than_limit(new_template.content, new_template.template_type) if over_limit: - message = "Content has a character count greater than the limit of {}".format(SMS_CHAR_COUNT_LIMIT) + char_limit = SMS_CHAR_COUNT_LIMIT if new_template.template_type == SMS_TYPE else EMAIL_CHAR_COUNT_LIMIT + message = "Content has a character count greater than the limit of {}".format(char_limit) errors = {"content": 
[message]} + current_app.logger.warning( + {"error": f"{new_template.template_type}_char_count_exceeded", "message": message, "service_id": service_id} + ) + raise InvalidRequest(errors, status_code=400) + + if _template_name_over_char_limit(new_template.name, new_template.content, new_template.template_type): + message = "Template name must be less than {} characters".format(TEMPLATE_NAME_CHAR_COUNT_LIMIT) + errors = {"name": [message]} + current_app.logger.warning( + {"error": f"{new_template.template_type}_name_char_count_exceeded", "message": message, "service_id": service_id} + ) raise InvalidRequest(errors, status_code=400) check_reply_to(service_id, new_template.reply_to, new_template.template_type) - dao_create_template(new_template) + redact_personalisation = should_template_be_redacted(organisation) + dao_create_template(new_template, redact_personalisation=redact_personalisation) + + return jsonify(data=template_schema.dump(new_template)), 201 + + +@template_blueprint.route("//category/", methods=["POST"]) +def update_templates_category(service_id, template_id, template_category_id): + updated = dao_update_template_category(template_id, template_category_id) + return jsonify(data=template_schema.dump(updated)), 200 - return jsonify(data=template_schema.dump(new_template).data), 201 + +@template_blueprint.route("//process-type", methods=["POST"]) +def update_template_process_type(template_id): + data = request.get_json() + if "process_type" not in data: + message = "Field is required" + errors = {"process_type": [message]} + raise InvalidRequest(errors, status_code=400) + + updated = dao_update_template_process_type(template_id=template_id, process_type=data.get("process_type")) + return jsonify(data=template_schema.dump(updated)), 200 @template_blueprint.route("/", methods=["POST"]) @@ -109,32 +175,58 @@ def update_template(service_id, template_id): if "reply_to" in data: check_reply_to(service_id, data.get("reply_to"), fetched_template.template_type) updated = dao_update_template_reply_to(template_id=template_id, reply_to=data.get("reply_to")) - return jsonify(data=template_schema.dump(updated).data), 200 + return jsonify(data=template_schema.dump(updated)), 200 - current_data = dict(template_schema.dump(fetched_template).data.items()) - updated_template = dict(template_schema.dump(fetched_template).data.items()) + current_data = dict(template_schema.dump(fetched_template).items()) + updated_template = dict(template_schema.dump(fetched_template).items()) updated_template.update(data) # Check if there is a change to make. 
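     # (per _template_has_not_changed below, the comparison covers name, content, subject, archived, process_type, postage and template_category_id)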
if _template_has_not_changed(current_data, updated_template): return jsonify(data=updated_template), 200 - over_limit = _content_count_greater_than_limit(updated_template["content"], fetched_template.template_type) - if over_limit: - message = "Content has a character count greater than the limit of {}".format(SMS_CHAR_COUNT_LIMIT) + content_over_limit = _content_count_greater_than_limit(updated_template["content"], fetched_template.template_type) + name_over_limit = _template_name_over_char_limit( + updated_template["name"], updated_template["content"], fetched_template.template_type + ) + if content_over_limit: + char_limit = SMS_CHAR_COUNT_LIMIT if fetched_template.template_type == SMS_TYPE else EMAIL_CHAR_COUNT_LIMIT + message = "Content has a character count greater than the limit of {}".format(char_limit) errors = {"content": [message]} + current_app.logger.warning( + {"error": f"{fetched_template.template_type}_char_count_exceeded", "message": message, "template_id": template_id} + ) raise InvalidRequest(errors, status_code=400) - update_dict = template_schema.load(updated_template).data + if name_over_limit: + message = "Template name must be less than {} characters".format(TEMPLATE_NAME_CHAR_COUNT_LIMIT) + errors = {"name": [message]} + current_app.logger.warning( + { + "error": f"{fetched_template.template_type}_name_char_count_exceeded", + "message": message, + "template_id": template_id, + } + ) + raise InvalidRequest(errors, status_code=400) + + # if the template category is changing, set the process_type to None to remove any priority override + if current_app.config["FF_TEMPLATE_CATEGORY"]: + if updated_template["template_category_id"] != str(fetched_template.template_category_id): + updated_template["process_type"] = None + + update_dict = template_schema.load(updated_template) + if update_dict.archived: + update_dict.folder = None dao_update_template(update_dict) - return jsonify(data=template_schema.dump(update_dict).data), 200 + return jsonify(data=template_schema.dump(update_dict)), 200 @template_blueprint.route("/precompiled", methods=["GET"]) def get_precompiled_template_for_service(service_id): template = get_precompiled_letter_template(service_id) - template_dict = template_schema.dump(template).data + template_dict = template_schema.dump(template) return jsonify(template_dict), 200 @@ -142,21 +234,21 @@ def get_precompiled_template_for_service(service_id): @template_blueprint.route("", methods=["GET"]) def get_all_templates_for_service(service_id): templates = dao_get_all_templates_for_service(service_id=service_id) - data = template_schema.dump(templates, many=True).data + data = reduced_template_schema.dump(templates, many=True) return jsonify(data=data) @template_blueprint.route("/", methods=["GET"]) def get_template_by_id_and_service_id(service_id, template_id): fetched_template = dao_get_template_by_id_and_service_id(template_id=template_id, service_id=service_id) - data = template_schema.dump(fetched_template).data + data = template_schema.dump(fetched_template) return jsonify(data=data) @template_blueprint.route("//preview", methods=["GET"]) def preview_template_by_id_and_service_id(service_id, template_id): fetched_template = dao_get_template_by_id_and_service_id(template_id=template_id, service_id=service_id) - data = template_schema.dump(fetched_template).data + data = template_schema.dump(fetched_template) template_object = get_template_instance(data, values=request.args.to_dict()) if template_object.missing_data: @@ -174,7 +266,7 @@ def 
preview_template_by_id_and_service_id(service_id, template_id): def get_template_version(service_id, template_id, version): data = template_history_schema.dump( dao_get_template_by_id_and_service_id(template_id=template_id, service_id=service_id, version=version) - ).data + ) return jsonify(data=data) @@ -183,14 +275,14 @@ def get_template_versions(service_id, template_id): data = template_history_schema.dump( dao_get_template_versions(service_id=service_id, template_id=template_id), many=True, - ).data + ) return jsonify(data=data) def _template_has_not_changed(current_data, updated_template): return all( current_data[key] == updated_template[key] - for key in ("name", "content", "subject", "archived", "process_type", "postage") + for key in ("name", "content", "subject", "archived", "process_type", "postage", "template_category_id") ) @@ -220,7 +312,6 @@ def preview_letter_template_by_notification_id(service_id, notification_id, file if template.is_precompiled_letter: try: - pdf_file = get_letter_pdf(notification) except botocore.exceptions.ClientError as e: @@ -263,7 +354,6 @@ def preview_letter_template_by_notification_id(service_id, notification_id, file else: response_content = content else: - template_for_letter_print = { "id": str(notification.template_id), "subject": template.subject, diff --git a/app/template/template_category_rest.py b/app/template/template_category_rest.py new file mode 100644 index 0000000000..dd8ea98088 --- /dev/null +++ b/app/template/template_category_rest.py @@ -0,0 +1,97 @@ +from flask import Blueprint, jsonify, request + +from app.dao.template_categories_dao import ( + dao_create_template_category, + dao_delete_template_category_by_id, + dao_get_all_template_categories, + dao_get_template_category_by_id, + dao_get_template_category_by_template_id, + dao_update_template_category, +) +from app.errors import register_errors +from app.models import TemplateCategory +from app.schemas import template_category_schema + +template_category_blueprint = Blueprint( + "template_category", + __name__, + url_prefix="/template-category", +) + +register_errors(template_category_blueprint) + + +@template_category_blueprint.route("", methods=["POST"]) +def create_template_category(): + data = request.get_json() + + template_category_schema.load(data) + template_category = TemplateCategory.from_json(data) + + dao_create_template_category(template_category) + + return jsonify(template_category=template_category_schema.dump(template_category)), 201 + + +@template_category_blueprint.route("/", methods=["GET"]) +def get_template_category(template_category_id): + template_category = dao_get_template_category_by_id(template_category_id) + return jsonify(template_category=template_category_schema.dump(template_category)), 200 + + +@template_category_blueprint.route("/by-template-id/", methods=["GET"]) +def get_template_category_by_template_id(template_id): + template_category = dao_get_template_category_by_template_id(template_id) + return jsonify(template_category=template_category_schema.dump(template_category)), 200 + + +@template_category_blueprint.route("", methods=["GET"]) +def get_template_categories(): + template_type = request.args.get("template_type", None) + + hidden = request.args.get("hidden") + if hidden is not None: + if hidden == "True": + hidden = True + elif hidden == "False": + hidden = False + else: + hidden = None + + # Validate request args + if template_type is not None: + if template_type not in ["sms", "email"]: + return jsonify(message="Invalid filter 
'template_type', valid template_types: 'sms', 'email'"), 400 + + template_categories = template_category_schema.dump(dao_get_all_template_categories(template_type, hidden), many=True) + return jsonify(template_categories=template_categories), 200 + + +@template_category_blueprint.route("/", methods=["POST"]) +def update_template_category(template_category_id): + current_category = dict(template_category_schema.dump(dao_get_template_category_by_id(template_category_id))) + current_category.update(request.get_json()) + + updated_category = template_category_schema.load(current_category) + dao_update_template_category(updated_category) + + return jsonify(template_category=template_category_schema.dump(updated_category)), 200 + + +@template_category_blueprint.route("/", methods=["DELETE"]) +def delete_template_category(template_category_id): + """Deletes a template category. By default, if the template category is associated with any template, it will not be deleted. + This can be overriden by specifying the `cascade` query parameter. + + Args: + template_category_id (str): The id of the template_category to delete + + Request Args: + cascade (bool, optional): Specify whether to dissociate the category from templates that use it to force removal. Defaults to False. + + Returns: + (flask.Response): The response message and http status code. + """ + cascade = True if request.args.get("cascade") == "True" else False + dao_delete_template_category_by_id(template_category_id, cascade=cascade) + return "", 204 diff --git a/app/template_statistics/rest.py b/app/template_statistics/rest.py index eead79e539..deecf76132 100644 --- a/app/template_statistics/rest.py +++ b/app/template_statistics/rest.py @@ -55,6 +55,6 @@ def get_template_statistics_for_template_id(service_id, template_id): data = None notification = dao_get_last_template_usage(template_id, template.template_type, template.service_id) if notification: - data = notification_with_template_schema.dump(notification).data + data = notification_with_template_schema.dump(notification) return jsonify(data=data) diff --git a/app/types.py b/app/types.py new file mode 100644 index 0000000000..1bbb7c3fc7 --- /dev/null +++ b/app/types.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import Optional + +from app.encryption import NotificationDictToSign +from app.models import Job, NotificationType, Service + + +class VerifiedNotification(NotificationDictToSign): + service: Service + notification_id: str + template_id: str + recipient: str # to + notification_type: NotificationType + api_key_id: Optional[str] # notification.get("api_key", None) + created_at: datetime + job_id: Optional[Job] + job_row_number: Optional[int] diff --git a/app/user/contact_request.py b/app/user/contact_request.py index 7317a69302..3473db150a 100644 --- a/app/user/contact_request.py +++ b/app/user/contact_request.py @@ -16,6 +16,7 @@ class ContactRequest: name: str = field(default="") message: str = field(default="") user_profile: str = field(default="") + organisation_id: str = field(default="") department_org_name: str = field(default="") program_service_name: str = field(default="") intended_recipients: str = field(default="") @@ -30,6 +31,12 @@ class ContactRequest: notification_types: str = field(default="") expected_volume: str = field(default="") branding_url: str = field(default="") + branding_logo_name: str = field(default="") + alt_text_en: str = field(default="") + alt_text_fr: str = field(default="") + template_category_name_en: str = 
field(default="") + template_category_name_fr: str = field(default="") + template_id_link: str = field(default="") def __post_init__(self): # email address is mandatory for us @@ -52,3 +59,6 @@ def is_go_live_request(self): def is_branding_request(self): return "branding_request" in self.support_type.lower() + + def is_new_template_category_request(self): + return "new_template_category_request" in self.support_type.lower() diff --git a/app/user/rest.py b/app/user/rest.py index e8ce2e4b82..1d1918a71c 100644 --- a/app/user/rest.py +++ b/app/user/rest.py @@ -12,9 +12,9 @@ from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound +from app import salesforce_client from app.clients.freshdesk import Freshdesk -from app.clients.zendesk import Zendesk -from app.clients.zendesk_sell import ZenDeskSell +from app.clients.salesforce.salesforce_engagement import ENGAGEMENT_STAGE_ACTIVATION from app.config import Config, QueueNames from app.dao.fido2_key_dao import ( create_fido2_session, @@ -27,7 +27,7 @@ from app.dao.login_event_dao import list_login_events, save_login_event from app.dao.permissions_dao import permission_dao from app.dao.service_user_dao import dao_get_service_user, dao_update_service_user -from app.dao.services_dao import dao_fetch_service_by_id +from app.dao.services_dao import dao_fetch_service_by_id, dao_update_service from app.dao.template_folder_dao import dao_get_template_folder_by_id_and_service_id from app.dao.templates_dao import dao_get_template_by_id from app.dao.users_dao import ( @@ -105,19 +105,14 @@ def handle_integrity_error(exc): @user_blueprint.route("", methods=["POST"]) def create_user(): - # import pdb; pdb.set_trace() - user_to_create, errors = create_user_schema.load(request.get_json()) req_json = request.get_json() + user_to_create = create_user_schema.load(req_json) password = req_json.get("password", None) - if not password: - errors.update({"password": ["Missing data for required field."]}) + response = pwnedpasswords.check(password) + if response > 0: + errors = {"password": ["Password is not allowed."]} raise InvalidRequest(errors, status_code=400) - else: - response = pwnedpasswords.check(password) - if response > 0: - errors.update({"password": ["Password is not allowed."]}) - raise InvalidRequest(errors, status_code=400) save_model_user(user_to_create, pwd=req_json.get("password")) result = user_to_create.serialize() @@ -133,9 +128,7 @@ def update_user_attribute(user_id): else: updated_by = None - update_dct, errors = user_update_schema_load_json.load(req_json) - if errors: - raise InvalidRequest(errors, status_code=400) + update_dct = user_update_schema_load_json.load(req_json) save_user_attribute(user_to_update, update_dict=update_dct) @@ -179,6 +172,13 @@ def update_user_attribute(user_id): send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) + if current_app.config["FF_SALESFORCE_CONTACT"]: + try: + updated_user = get_user_by_id(user_id=user_id) + salesforce_client.contact_update(updated_user) + except Exception as e: + current_app.logger.exception(e) + return jsonify(data=user_to_update.serialize()), 200 @@ -198,6 +198,10 @@ def activate_user(user_id): user.state = "active" save_model_user(user) + + if current_app.config["FF_SALESFORCE_CONTACT"]: + salesforce_client.contact_create(user) + return jsonify(data=user.serialize()), 200 @@ -226,7 +230,12 @@ def verify_user_password(user_id): return jsonify({}), 204 else: increment_failed_login_count(user_to_verify) - message = "Incorrect password" 
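+            # include the accumulated failed_login_count in the error message once FAILED_LOGIN_LIMIT is reached; otherwise report only the user_id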
+ if user_to_verify.failed_login_count >= current_app.config["FAILED_LOGIN_LIMIT"]: + message = "Failed login: Incorrect password for user_id {user_id} failed_login {failed_login_count} times".format( + user_id=user_id, failed_login_count=user_to_verify.failed_login_count + ) + else: + message = "Incorrect password for user_id {user_id}".format(user_id=user_id) errors = {"password": [message]} raise InvalidRequest(errors, status_code=400) @@ -350,9 +359,7 @@ def create_2fa_code(template_id, user_to_send_to, secret_code, recipient, person @user_blueprint.route("//change-email-verification", methods=["POST"]) def send_user_confirm_new_email(user_id): user_to_send_to = get_user_by_id(user_id=user_id) - email, errors = email_data_request_schema.load(request.get_json()) - if errors: - raise InvalidRequest(message=errors, status_code=400) + email = email_data_request_schema.load(request.get_json()) template = dao_get_template_by_id(current_app.config["CHANGE_EMAIL_CONFIRMATION_TEMPLATE_ID"]) service = Service.query.get(current_app.config["NOTIFY_SERVICE_ID"]) @@ -407,7 +414,7 @@ def send_new_user_email_verification(user_id): @user_blueprint.route("//email-already-registered", methods=["POST"]) def send_already_registered_email(user_id): - to, errors = email_data_request_schema.load(request.get_json()) + to = email_data_request_schema.load(request.get_json()) template = dao_get_template_by_id(current_app.config["ALREADY_REGISTERED_EMAIL_TEMPLATE_ID"]) service = Service.query.get(current_app.config["NOTIFY_SERVICE_ID"]) @@ -434,14 +441,14 @@ def send_already_registered_email(user_id): @user_blueprint.route("//contact-request", methods=["POST"]) def send_contact_request(user_id): - contact = None user = None try: contact = ContactRequest(**request.json) user = get_user_by_email(contact.email_address) - if not any([not s.restricted for s in user.services]): + # If the user has no live services, don't want to escalate the ticket. 
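+        # equivalent to the previous `not any([not s.restricted ...])` check: true when every service is restricted,
+        # and also when the user has no services at all (all() of an empty list is True)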
+ if all([s.restricted for s in user.services]): contact.tags = ["z_skip_opsgenie", "z_skip_urgent_escalation"] except TypeError as e: @@ -451,22 +458,27 @@ def send_contact_request(user_id): # This is perfectly normal if get_user_by_email raises pass - try: - if contact.is_go_live_request(): + # Update the engagement stage in Salesforce for go live requests + if contact and contact.is_go_live_request() and current_app.config["FF_SALESFORCE_CONTACT"]: + try: + engagement_updates = {"StageName": ENGAGEMENT_STAGE_ACTIVATION, "Description": contact.main_use_case} service = dao_fetch_service_by_id(contact.service_id) - ZenDeskSell().send_go_live_request(service, user, contact) - else: - ZenDeskSell().send_contact_request(contact) - except Exception as e: - current_app.logger.exception(e) - - if contact.is_demo_request(): - return jsonify({}), 204 - - try: - Zendesk(contact).send_ticket() - except Exception as e: - current_app.logger.exception(e) + salesforce_client.engagement_update(service, user, engagement_updates) + + if not service.organisation_notes: + # the service was created before we started requesting the organisation name at creation time + if not contact.department_org_name: + # this shouldn't happen, but if it does, we don't want to leave the service with no organisation name + contact.department_org_name = "Unknown" + # fall back on the organisation name collected from the go live request + service.organisation_notes = contact.department_org_name + dao_update_service(service) + else: + # this is the normal case, where the service has an organisation name collected when it was created + contact.department_org_name = service.organisation_notes + + except Exception as e: + current_app.logger.exception(e) status_code = Freshdesk(contact).send_ticket() return jsonify({"status_code": status_code}), 204 @@ -474,7 +486,6 @@ def send_contact_request(user_id): @user_blueprint.route("//branding-request", methods=["POST"]) def send_branding_request(user_id): - contact = None data = request.json try: @@ -486,7 +497,12 @@ def send_branding_request(user_id): email_address=user.email_address, service_id=data["serviceID"], service_name=data["service_name"], + organisation_id=data["organisation_id"], + department_org_name=data["organisation_name"], branding_url=get_logo_url(data["filename"]), + branding_logo_name=data["branding_logo_name"] if "branding_logo_name" in data else "", + alt_text_en=data["alt_text_en"], + alt_text_fr=data["alt_text_fr"], ) contact.tags = ["z_skip_opsgenie", "z_skip_urgent_escalation"] @@ -498,10 +514,35 @@ def send_branding_request(user_id): current_app.logger.error(e) return jsonify({}), 400 + status_code = Freshdesk(contact).send_ticket() + return jsonify({"status_code": status_code}), 204 + + +@user_blueprint.route("//new-template-category-request", methods=["POST"]) +def send_new_template_category_request(user_id): + contact = None + data = request.json try: - Zendesk(contact).send_ticket() - except Exception as e: - current_app.logger.exception(e) + user = get_user_by_id(user_id=user_id) + contact = ContactRequest( + support_type="new_template_category_request", + friendly_support_type="New template category request", + name=user.name, + email_address=user.email_address, + service_id=data["service_id"], + template_category_name_en=data["template_category_name_en"], + template_category_name_fr=data["template_category_name_fr"], + template_id_link=f"https://{current_app.config['ADMIN_BASE_URL']}/services/{data['service_id']}/templates/{data['template_id']}", + ) + 
contact.tags = ["z_skip_opsgenie", "z_skip_urgent_escalation"] + + except TypeError as e: + current_app.logger.error(e) + return jsonify({}), 400 + except NoResultFound as e: + # This means that get_user_by_id couldn't find a user + current_app.logger.error(e) + return jsonify({}), 400 status_code = Freshdesk(contact).send_ticket() return jsonify({"status_code": status_code}), 204 @@ -520,7 +561,7 @@ def set_permissions(user_id, service_id): # TODO fix security hole, how do we verify that the user # who is making this request has permission to make the request. service_user = dao_get_service_user(user_id, service_id) - user = service_user.user + user = get_user_by_id(user_id) service = dao_fetch_service_by_id(service_id=service_id) data = request.get_json() @@ -564,7 +605,7 @@ def get_by_email(): @user_blueprint.route("/find-users-by-email", methods=["POST"]) def find_users_by_email(): - email, errors = partial_email_data_request_schema.load(request.get_json()) + email = partial_email_data_request_schema.load(request.get_json()) fetched_users = get_users_by_partial_email(email["email"]) result = [user.serialize_for_users_list() for user in fetched_users] return jsonify(data=result), 200 @@ -572,7 +613,7 @@ def find_users_by_email(): @user_blueprint.route("/reset-password", methods=["POST"]) def send_user_reset_password(): - email, errors = email_data_request_schema.load(request.get_json()) + email = email_data_request_schema.load(request.get_json()) user_to_send_to = get_user_by_email(email["email"]) @@ -601,21 +642,62 @@ def send_user_reset_password(): return jsonify({}), 204 +@user_blueprint.route("/forced-password-reset", methods=["POST"]) +def send_forced_user_reset_password(): + email = email_data_request_schema.load(request.get_json()) + + user_to_send_to = get_user_by_email(email["email"]) + + if user_to_send_to.blocked: + return jsonify({"message": "cannot reset password: user blocked"}), 400 + + template = dao_get_template_by_id(current_app.config["FORCED_PASSWORD_RESET_TEMPLATE_ID"]) + service = Service.query.get(current_app.config["NOTIFY_SERVICE_ID"]) + saved_notification = persist_notification( + template_id=template.id, + template_version=template.version, + recipient=email["email"], + service=service, + personalisation={ + "user_name": user_to_send_to.name, + "url": _create_reset_password_url(user_to_send_to.email_address), + }, + notification_type=template.template_type, + api_key_id=None, + key_type=KEY_TYPE_NORMAL, + reply_to_text=service.get_default_reply_to_email_address(), + ) + + send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) + + return jsonify({}), 204 + + @user_blueprint.route("//update-password", methods=["POST"]) def update_password(user_id): user = get_user_by_id(user_id=user_id) req_json = request.get_json() pwd = req_json.get("_password") - update_dct, errors = user_update_password_schema_load_json.load(req_json) - if errors: - raise InvalidRequest(errors, status_code=400) + + login_data = {} + + if "loginData" in req_json: + login_data = req_json["loginData"] + del req_json["loginData"] + + user_update_password_schema_load_json.load(req_json) response = pwnedpasswords.check(pwd) if response > 0: - errors.update({"password": ["Password is not allowed."]}) + errors = {"password": ["Password is not allowed."]} raise InvalidRequest(errors, status_code=400) update_user_password(user, pwd) + + # save login event + if login_data: + save_login_event(LoginEvent(user_id=user.id, data=login_data)) + changes = {"password": "password 
updated"} try: diff --git a/app/utils.py b/app/utils.py index c1d070ba1b..1836093753 100644 --- a/app/utils.py +++ b/app/utils.py @@ -11,6 +11,8 @@ ) from sqlalchemy import func +from app.config import Priorities, QueueNames + local_timezone = pytz.timezone(os.getenv("TIMEZONE", "America/Toronto")) @@ -29,7 +31,7 @@ def pagination_links(pagination, endpoint, **kwargs): def url_with_token(data, url, config, base_url=None): from notifications_utils.url_safe_token import generate_token - token = generate_token(data, config["SECRET_KEY"], config["DANGEROUS_SALT"]) + token = generate_token(data, config["SECRET_KEY"]) base_url = (base_url or config["ADMIN_BASE_URL"]) + url return base_url + token @@ -42,6 +44,10 @@ def get_template_instance(template, values): ](template, values) +def get_delivery_queue_for_template(template): + return QueueNames.DELIVERY_QUEUES[template.template_type][Priorities.to_lmh(template.process_type)] + + def get_html_email_body_from_template(template_instance): from app.models import EMAIL_TYPE @@ -195,3 +201,23 @@ def get_document_url(lang: str, path: str): def is_blank(content: Any) -> bool: content = str(content) return not content or content.isspace() + + +def get_limit_reset_time_et() -> dict[str, str]: + """ + This function gets the time when the daily limit resets (UTC midnight) + and returns this formatted in eastern time. This will either be 7PM or 8PM, + depending on the time of year.""" + + now = datetime.now() + one_day = timedelta(1.0) + next_midnight = datetime(now.year, now.month, now.day) + one_day + + utc = pytz.timezone("UTC") + et = pytz.timezone("US/Eastern") + + next_midnight_utc = next_midnight.astimezone(utc) + next_midnight_utc_in_et = next_midnight_utc.astimezone(et) + + limit_reset_time_et = {"12hr": next_midnight_utc_in_et.strftime("%-I%p"), "24hr": next_midnight_utc_in_et.strftime("%H")} + return limit_reset_time_et diff --git a/app/v2/errors.py b/app/v2/errors.py index 8f19dfdb02..62187005ea 100644 --- a/app/v2/errors.py +++ b/app/v2/errors.py @@ -30,14 +30,46 @@ def __init__(self, sending_limit): self.message = self.message_template.format(sending_limit) +class TooManySMSRequestsError(InvalidRequest): + status_code = 429 + message_template = "Exceeded SMS daily sending limit of {} fragments" + + def __init__(self, sending_limit): + self.message = self.message_template.format(sending_limit) + + +class TooManyEmailRequestsError(InvalidRequest): + status_code = 429 + messsage_template = "Exceeded email daily sending limit of {} messages" + + def __init__(self, sending_limit): + self.message = self.messsage_template.format(sending_limit) + + class LiveServiceTooManyRequestsError(TooManyRequestsError): pass +class LiveServiceTooManySMSRequestsError(TooManySMSRequestsError): + pass + + +class LiveServiceTooManyEmailRequestsError(TooManyEmailRequestsError): + pass + + class TrialServiceTooManyRequestsError(TooManyRequestsError): pass +class TrialServiceTooManySMSRequestsError(TooManySMSRequestsError): + pass + + +class TrialServiceTooManyEmailRequestsError(TooManyEmailRequestsError): + pass + + class RateLimitError(InvalidRequest): status_code = 429 message_template = "Exceeded rate limit for key type {} of {} requests per {} seconds" diff --git a/app/v2/notifications/get_notifications.py b/app/v2/notifications/get_notifications.py index f06775ad94..925d5ccdfb 100644 --- a/app/v2/notifications/get_notifications.py +++ b/app/v2/notifications/get_notifications.py @@ -27,7 +27,10 @@ def get_notification_by_id(notification_id): notification = 
notifications_dao.get_notification_with_personalisation( authenticated_service.id, notification_id, key_type=None ) - return jsonify(notification.serialize()), 200 + if notification is not None: + return jsonify(notification.serialize()), 200 + else: + return jsonify(result="error", message="Notification not found in database"), 404 @v2_notification_blueprint.route("//pdf", methods=["GET"]) @@ -53,7 +56,7 @@ def get_pdf_for_notification(notification_id): except Exception: raise PDFNotReadyError() - return send_file(filename_or_fp=BytesIO(pdf_data), mimetype="application/pdf") + return send_file(path_or_file=BytesIO(pdf_data), mimetype="application/pdf") @v2_notification_blueprint.route("", methods=["GET"]) diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index 371d3b368b..d8edf8627b 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -2,63 +2,87 @@ import csv import functools import uuid +from datetime import datetime from io import StringIO import werkzeug from flask import abort, current_app, jsonify, request +from notifications_utils import SMS_CHAR_COUNT_LIMIT from notifications_utils.recipients import ( RecipientCSV, try_validate_and_format_phone_number, ) +from notifications_utils.template import Template from app import ( api_user, authenticated_service, create_uuid, document_download_client, - encryption, + email_bulk_publish, + email_normal_publish, + email_priority_publish, notify_celery, + redis_store, + signer_notification, + sms_bulk_publish, + sms_normal_publish, + sms_priority_publish, statsd_client, ) from app.aws.s3 import upload_job_to_s3 from app.celery.letters_pdf_tasks import create_letters_pdf, process_virus_scan_passed from app.celery.research_mode_tasks import create_fake_letter_response_file -from app.celery.tasks import process_job, save_email, save_sms +from app.celery.tasks import process_job, seed_bounce_rate_in_redis from app.clients.document_download import DocumentDownloadError from app.config import QueueNames, TaskNames from app.dao.jobs_dao import dao_create_job from app.dao.notifications_dao import update_notification_status_by_reference from app.dao.services_dao import fetch_todays_total_message_count from app.dao.templates_dao import get_precompiled_letter_template +from app.encryption import NotificationDictToSign from app.letters.utils import upload_letter_pdf from app.models import ( + BULK, EMAIL_TYPE, JOB_STATUS_PENDING, JOB_STATUS_SCHEDULED, KEY_TYPE_TEAM, KEY_TYPE_TEST, LETTER_TYPE, + NORMAL, NOTIFICATION_CREATED, NOTIFICATION_DELIVERED, NOTIFICATION_PENDING_VIRUS_CHECK, NOTIFICATION_SENDING, + PRIORITY, SMS_TYPE, UPLOAD_DOCUMENT, + ApiKey, Notification, + NotificationType, + Service, + TemplateType, ) from app.notifications.process_letter_notifications import create_letter_notification from app.notifications.process_notifications import ( + choose_queue, + db_save_and_send_notification, persist_notification, persist_scheduled_notification, - send_notification_to_queue, simulated_recipient, + transform_notification, ) from app.notifications.validators import ( + check_email_daily_limit, check_rate_limiting, check_service_can_schedule_notification, check_service_email_reply_to_id, check_service_has_permission, check_service_sms_sender_id, + check_sms_daily_limit, + increment_email_daily_count_send_warnings_if_needed, + increment_sms_daily_count_send_warnings_if_needed, validate_and_format_recipient, validate_template, 
validate_template_exists, @@ -66,6 +90,8 @@ from app.schema_validation import validate from app.schemas import job_schema from app.service.utils import safelisted_members +from app.sms_fragment_utils import fetch_todays_requested_sms_count +from app.utils import get_delivery_queue_for_template from app.v2.errors import BadRequestError from app.v2.notifications import v2_notification_blueprint from app.v2.notifications.create_response import ( @@ -81,6 +107,8 @@ post_sms_request, ) +TWENTY_FOUR_HOURS_S = 24 * 60 * 60 + @v2_notification_blueprint.route("/{}".format(LETTER_TYPE), methods=["POST"]) def post_precompiled_letter_notification(): @@ -117,14 +145,35 @@ def post_precompiled_letter_notification(): return jsonify(resp), 201 +def _seed_bounce_data(epoch_timestamp: int, service_id: str): + current_time_s = int(datetime.utcnow().timestamp()) + time_difference_s = current_time_s - epoch_timestamp + + if 0 <= time_difference_s <= TWENTY_FOUR_HOURS_S: + # We are in the 24 hour window to seed bounce rate data + seed_bounce_rate_in_redis.apply_async(service_id) + else: + current_app.logger.info("Not in the time period to seed bounce rate {}".format(service_id)) + + +# flake8: noqa: C901 @v2_notification_blueprint.route("/bulk", methods=["POST"]) def post_bulk(): try: request_json = request.get_json() except werkzeug.exceptions.BadRequest as e: raise BadRequestError(message=f"Error decoding arguments: {e.description}", status_code=400) + except werkzeug.exceptions.UnsupportedMediaType as e: + raise BadRequestError( + message="UnsupportedMediaType error: {}".format(e.description), + status_code=415, + ) max_rows = current_app.config["CSV_MAX_ROWS"] + epoch_seeding_bounce = current_app.config["FF_BOUNCE_RATE_SEED_EPOCH_MS"] + if epoch_seeding_bounce: + _seed_bounce_data(epoch_seeding_bounce, str(authenticated_service.id)) + form = validate(request_json, post_bulk_request(max_rows)) if len([source for source in [form.get("rows"), form.get("csv")] if source]) != 1: @@ -132,7 +181,12 @@ def post_bulk(): template = validate_template_exists(form["template_id"], authenticated_service) check_service_has_permission(template.template_type, authenticated_service.permissions) - remaining_messages = authenticated_service.message_limit - fetch_todays_total_message_count(authenticated_service.id) + if template.template_type == SMS_TYPE: + fragments_sent = fetch_todays_requested_sms_count(authenticated_service.id) + remaining_messages = authenticated_service.sms_daily_limit - fragments_sent + else: + current_app.logger.info(f"[post_notifications.post_bulk()] Checking bounce rate for service: {authenticated_service.id}") + remaining_messages = authenticated_service.message_limit - fetch_todays_total_message_count(authenticated_service.id) form["validated_sender_id"] = validate_sender_id(template, form.get("reply_to_id")) @@ -152,18 +206,59 @@ def post_bulk(): max_rows=max_rows, safelist=safelisted_members(authenticated_service, api_user.key_type), remaining_messages=remaining_messages, + template=Template(template.__dict__), ) except csv.Error as e: raise BadRequestError(message=f"Error converting to CSV: {str(e)}", status_code=400) check_for_csv_errors(recipient_csv, max_rows, remaining_messages) + + for row in recipient_csv.get_rows(): + try: + validate_template(template.id, row.personalisation, authenticated_service, template.template_type) + except BadRequestError as e: + message = e.message + ". 
Notification to {} on row #{} exceeds the maximum size limit.".format( + row.recipient, row.index + 1 + ) + raise BadRequestError(message=message) + + if template.template_type == EMAIL_TYPE and api_user.key_type != KEY_TYPE_TEST: + check_email_daily_limit(authenticated_service, len(list(recipient_csv.get_rows()))) + scheduled_for = datetime.fromisoformat(form.get("scheduled_for")) if form.get("scheduled_for") else None + + if scheduled_for is None or not scheduled_for.date() > datetime.today().date(): + increment_email_daily_count_send_warnings_if_needed(authenticated_service, len(list(recipient_csv.get_rows()))) + + if template.template_type == SMS_TYPE: + # set sender_id if missing + if form["validated_sender_id"] is None: + default_senders = [x for x in authenticated_service.service_sms_senders if x.is_default] + default_sender_id = default_senders[0].id if default_senders else None + form["validated_sender_id"] = default_sender_id + + # calculate the number of simulated recipients + numberOfSimulated = sum( + simulated_recipient(i["phone_number"].data, template.template_type) for i in list(recipient_csv.get_rows()) + ) + mixedRecipients = numberOfSimulated > 0 and numberOfSimulated != len(list(recipient_csv.get_rows())) + + # if its a live or a team key, and they have specified testing and NON-testing recipients, raise an error + if api_user.key_type != KEY_TYPE_TEST and mixedRecipients: + raise BadRequestError(message="Bulk sending to testing and non-testing numbers is not supported", status_code=400) + + is_test_notification = api_user.key_type == KEY_TYPE_TEST or len(list(recipient_csv.get_rows())) == numberOfSimulated + + if not is_test_notification: + check_sms_daily_limit(authenticated_service, len(recipient_csv)) + increment_sms_daily_count_send_warnings_if_needed(authenticated_service, len(recipient_csv)) + job = create_bulk_job(authenticated_service, api_user, template, form, recipient_csv) - return jsonify(data=job_schema.dump(job).data), 201 + return jsonify(data=job_schema.dump(job)), 201 @v2_notification_blueprint.route("/", methods=["POST"]) -def post_notification(notification_type): +def post_notification(notification_type: NotificationType): try: request_json = request.get_json() except werkzeug.exceptions.BadRequest as e: @@ -171,6 +266,11 @@ def post_notification(notification_type): message="Error decoding arguments: {}".format(e.description), status_code=400, ) + except werkzeug.exceptions.UnsupportedMediaType as e: + raise BadRequestError( + message="UnsupportedMediaType error: {}".format(e.description), + status_code=415, + ) if notification_type == EMAIL_TYPE: form = validate(request_json, post_email_request) @@ -181,6 +281,10 @@ def post_notification(notification_type): else: abort(404) + epoch_seeding_bounce = current_app.config["FF_BOUNCE_RATE_SEED_EPOCH_MS"] + if epoch_seeding_bounce: + _seed_bounce_data(epoch_seeding_bounce, str(authenticated_service.id)) + check_service_has_permission(notification_type, authenticated_service.permissions) scheduled_for = form.get("scheduled_for", None) @@ -189,13 +293,24 @@ def post_notification(notification_type): check_rate_limiting(authenticated_service, api_user) + personalisation = strip_keys_from_personalisation_if_send_attach(form.get("personalisation", {})) template, template_with_content = validate_template( form["template_id"], - strip_keys_from_personalisation_if_send_attach(form.get("personalisation", {})), + personalisation, authenticated_service, notification_type, ) + if template.template_type == EMAIL_TYPE 
and api_user.key_type != KEY_TYPE_TEST: + check_email_daily_limit(authenticated_service, 1) # 1 email + + if template.template_type == SMS_TYPE: + is_test_notification = api_user.key_type == KEY_TYPE_TEST or simulated_recipient(form["phone_number"], notification_type) + if not is_test_notification: + check_sms_daily_limit(authenticated_service, 1) + + current_app.logger.info(f"Trying to send notification for Template ID: {template.id}") + reply_to = get_reply_to_text(notification_type, form, template) if notification_type == LETTER_TYPE: @@ -217,6 +332,14 @@ def post_notification(notification_type): template_with_content.values = notification.personalisation + if template.template_type == EMAIL_TYPE and api_user.key_type != KEY_TYPE_TEST: + increment_email_daily_count_send_warnings_if_needed(authenticated_service, 1) # 1 email + + if template.template_type == SMS_TYPE: + is_test_notification = api_user.key_type == KEY_TYPE_TEST or simulated_recipient(form["phone_number"], notification_type) + if not is_test_notification: + increment_sms_daily_count_send_warnings_if_needed(authenticated_service, 1) + if notification_type == SMS_TYPE: create_resp_partial = functools.partial(create_post_sms_response_from_notification, from_number=reply_to) elif notification_type == EMAIL_TYPE: @@ -244,7 +367,36 @@ def post_notification(notification_type): return jsonify(resp), 201 -def process_sms_or_email_notification(*, form, notification_type, api_key, template, service, reply_to_text=None): +def triage_notification_to_queues(notification_type: NotificationType, signed_notification_data, template: Template): + """Determine which queue to use based on notification_type and process_type + + Args: + notification_type: Type of notification being sent; either SMS_TYPE or EMAIL_TYPE + signed_notification_data: Encrypted notification data + template: Template used to send notification + Returns: + None + + """ + if notification_type == SMS_TYPE: + if template.process_type == PRIORITY: + sms_priority_publish.publish(signed_notification_data) + elif template.process_type == NORMAL: + sms_normal_publish.publish(signed_notification_data) + elif template.process_type == BULK: + sms_bulk_publish.publish(signed_notification_data) + elif notification_type == EMAIL_TYPE: + if template.process_type == PRIORITY: + email_priority_publish.publish(signed_notification_data) + elif template.process_type == NORMAL: + email_normal_publish.publish(signed_notification_data) + elif template.process_type == BULK: + email_bulk_publish.publish(signed_notification_data) + + +def process_sms_or_email_notification( + *, form, notification_type: NotificationType, api_key: ApiKey, template: Template, service: Service, reply_to_text=None +) -> Notification: form_send_to = form["email_address"] if notification_type == EMAIL_TYPE else form["phone_number"] send_to = validate_and_format_recipient( @@ -259,23 +411,25 @@ def process_sms_or_email_notification(*, form, notification_type, api_key, templ personalisation = process_document_uploads(form.get("personalisation"), service, simulated, template.id) - notification = { + _notification: NotificationDictToSign = { "id": create_uuid(), "template": str(template.id), - "template_version": str(template.version), + "service_id": str(service.id), + "template_version": str(template.version), # type: ignore "to": form_send_to, "personalisation": personalisation, "simulated": simulated, "api_key": str(api_key.id), "key_type": str(api_key.key_type), "client_reference": form.get("reference", None), + 
"reply_to_text": reply_to_text, } - encrypted_notification_data = encryption.encrypt(notification) - + signed_notification_data = signer_notification.sign(_notification) + notification = {**_notification} scheduled_for = form.get("scheduled_for", None) if scheduled_for: - notification = persist_notification( + notification = persist_notification( # keep scheduled notifications using the old code path for now template_id=template.id, template_version=template.version, recipient=form_send_to, @@ -285,27 +439,17 @@ def process_sms_or_email_notification(*, form, notification_type, api_key, templ api_key_id=api_key.id, key_type=api_key.key_type, client_reference=form.get("reference", None), - simulated=simulated, reply_to_text=reply_to_text, ) persist_scheduled_notification(notification.id, form["scheduled_for"]) + elif not simulated: + triage_notification_to_queues(notification_type, signed_notification_data, template) - elif current_app.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] and not simulated: - # depending on the type route to the appropriate save task - if notification_type == EMAIL_TYPE: - current_app.logger.info("calling save email task") - save_email.apply_async( - (authenticated_service.id, create_uuid(), encrypted_notification_data, None), - queue=QueueNames.DATABASE if not authenticated_service.research_mode else QueueNames.RESEARCH_MODE, - ) - elif notification_type == SMS_TYPE: - save_sms.apply_async( - (authenticated_service.id, create_uuid(), encrypted_notification_data, None), - queue=QueueNames.DATABASE if not authenticated_service.research_mode else QueueNames.RESEARCH_MODE, - ) - + current_app.logger.info( + f"Batch saving: {notification_type}/{template.process_type} {notification['id']} sent to buffer queue." + ) else: - notification = persist_notification( + notification = transform_notification( template_id=template.id, template_version=template.version, recipient=form_send_to, @@ -315,15 +459,16 @@ def process_sms_or_email_notification(*, form, notification_type, api_key, templ api_key_id=api_key.id, key_type=api_key.key_type, client_reference=form.get("reference", None), - simulated=simulated, reply_to_text=reply_to_text, ) if not simulated: - send_notification_to_queue( + notification.queue_name = choose_queue( notification=notification, research_mode=service.research_mode, - queue=template.queue_to_use(), + queue=get_delivery_queue_for_template(template), ) + db_save_and_send_notification(notification) + else: current_app.logger.debug("POST simulated notification for id: {}".format(notification.id)) @@ -342,7 +487,7 @@ def process_sms_or_email_notification(*, form, notification_type, api_key, templ return notification -def process_document_uploads(personalisation_data, service, simulated, template_id): +def process_document_uploads(personalisation_data, service: Service, simulated, template_id): file_keys = [k for k, v in (personalisation_data or {}).items() if isinstance(v, dict) and "file" in v] if not file_keys: return personalisation_data @@ -534,10 +679,16 @@ def check_for_csv_errors(recipient_csv, max_rows, remaining_messages): status_code=400, ) if recipient_csv.more_rows_than_can_send: - raise BadRequestError( - message=f"You only have {remaining_messages} remaining messages before you reach your daily limit. You've tried to send {nb_rows} messages.", - status_code=400, - ) + if recipient_csv.template_type == SMS_TYPE: + raise BadRequestError( + message=f"You only have {remaining_messages} remaining sms messages before you reach your daily limit. 
You've tried to send {len(recipient_csv)} sms messages.", + status_code=400, + ) + else: + raise BadRequestError( + message=f"You only have {remaining_messages} remaining messages before you reach your daily limit. You've tried to send {nb_rows} messages.", + status_code=400, + ) if recipient_csv.too_many_rows: raise BadRequestError( @@ -555,6 +706,12 @@ def check_for_csv_errors(recipient_csv, max_rows, remaining_messages): message=f"You cannot send to these recipients {explanation}", status_code=400, ) + if recipient_csv.template_type == SMS_TYPE and any(recipient_csv.rows_with_combined_variable_content_too_long): + raise BadRequestError( + message=f"Row {next(recipient_csv.rows_with_combined_variable_content_too_long).index + 1} - has a character count greater than {SMS_CHAR_COUNT_LIMIT} characters. Some messages may be too long due to custom content.", + status_code=400, + ) + if recipient_csv.rows_with_errors: def row_error(row): @@ -595,7 +752,7 @@ def create_bulk_job(service, api_key, template, form, recipient_csv): data["job_status"] = JOB_STATUS_SCHEDULED data["scheduled_for"] = form.get("scheduled_for") - job = job_schema.load(data).data + job = job_schema.load(data) dao_create_job(job) if job.job_status == JOB_STATUS_PENDING: diff --git a/app/variables.py b/app/variables.py index c6284d4ecc..404be5d865 100644 --- a/app/variables.py +++ b/app/variables.py @@ -1,6 +1,7 @@ # all jobs for letters created via the api must have this filename LETTER_API_FILENAME = "letter submitted via api" LETTER_TEST_API_FILENAME = "test letter submitted via api" +PT_DATA_RETENTION_DAYS = 3 # S3 tags diff --git a/application.py b/application.py index a76440c520..12cea1703c 100644 --- a/application.py +++ b/application.py @@ -3,31 +3,27 @@ import os -import awsgi import newrelic.agent # See https://bit.ly/2xBVKBH -import sentry_sdk +from apig_wsgi import make_lambda_handler +from aws_xray_sdk.core import xray_recorder +from aws_xray_sdk.ext.flask.middleware import XRayMiddleware from dotenv import load_dotenv from flask import Flask -from sentry_sdk.integrations.celery import CeleryIntegration -from sentry_sdk.integrations.flask import FlaskIntegration -from sentry_sdk.integrations.redis import RedisIntegration -from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration from werkzeug.middleware.proxy_fix import ProxyFix from app import create_app load_dotenv() -sentry_sdk.init( - dsn=os.environ.get("SENTRY_URL", ""), - integrations=[CeleryIntegration(), FlaskIntegration(), RedisIntegration(), SqlalchemyIntegration()], - release="notify-api@" + os.environ.get("GIT_SHA", ""), -) - application = Flask("api") application.wsgi_app = ProxyFix(application.wsgi_app) # type: ignore + app = create_app(application) -app + +xray_recorder.configure(service='api') +XRayMiddleware(app, xray_recorder) + +apig_wsgi_handler = make_lambda_handler(app, binary_support=True) if os.environ.get("USE_LOCAL_JINJA_TEMPLATES") == "True": print("") @@ -42,4 +38,5 @@ def handler(event, context): newrelic.agent.initialize() # noqa: E402 - return awsgi.response(app, event, context) + newrelic.agent.register_application(timeout=20.0) + return apig_wsgi_handler(event, context) diff --git a/bin/entry.sh b/bin/entry.sh index 1f7f66d176..8522d13493 100644 --- a/bin/entry.sh +++ b/bin/entry.sh @@ -1,6 +1,7 @@ #!/bin/sh if [ -z "${AWS_LAMBDA_RUNTIME_API}" ]; then - exec /usr/bin/aws-lambda-rie /usr/local/bin/python -m awslambdaric $1 + exec /usr/bin/aws-lambda-rie $(which python) -m awslambdaric $1 else - exec 
/usr/local/bin/python -m awslambdaric $1 + . /sync_lambda_envs.sh # Retrieve .env from parameter store and remove currently set environement variables + exec $(which python) -m awslambdaric $1 fi \ No newline at end of file diff --git a/bin/execute_and_publish_performance_test.sh b/bin/execute_and_publish_performance_test.sh index ef7fe0a5bf..fa2d50b05a 100755 --- a/bin/execute_and_publish_performance_test.sh +++ b/bin/execute_and_publish_performance_test.sh @@ -1,13 +1,24 @@ #!/bin/bash +# Setup current_time=$(date "+%Y.%m.%d-%H.%M.%S") perf_test_aws_s3_bucket=${PERF_TEST_AWS_S3_BUCKET:-notify-performance-test-results-staging} perf_test_csv_directory_path=${PERF_TEST_CSV_DIRECTORY_PATH:-/tmp/notify_performance_test} - mkdir -p $perf_test_csv_directory_path/$current_time +# Run old performance test and copy results to S3 locust --headless --config tests-perf/locust/locust.conf --html $perf_test_csv_directory_path/$current_time/index.html --csv $perf_test_csv_directory_path/$current_time/perf_test - aws s3 cp $perf_test_csv_directory_path/ "s3://$perf_test_aws_s3_bucket" --recursive || exit 1 +# Sleep 15 minutes to allow the system to stabilize +sleep 900 + +# Run email send rate performance test +# This configuration should send 10K emails / minute for 10 minutes for 100K emails total. +# We run this test on Tuesday through Friday (just after midnight UTC) only. +if [ "$(date +%u)" -ge 2 ] && [ "$(date +%u)" -le 5 ]; then + locust --headless --host https://api.staging.notification.cdssandbox.xyz --locustfile tests-perf/locust/send_rate_email.py --users 5 --run-time 10m --spawn-rate 1 +fi + +# Cleanup rm -rf $perf_test_csv_directory_path/$current_time diff --git a/bin/get_newrelic_layer.sh b/bin/get_newrelic_layer.sh new file mode 100755 index 0000000000..ad6817c55a --- /dev/null +++ b/bin/get_newrelic_layer.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +aws lambda get-layer-version-by-arn \ +--region ca-central-1 \ +--arn arn:aws:lambda:ca-central-1:451483290750:layer:NewRelicPython39:12 \ +| jq -r '.Content.Location' \ +| xargs curl -o ../newrelic-layer.zip \ No newline at end of file diff --git a/bin/sync_lambda_envs.sh b/bin/sync_lambda_envs.sh new file mode 100755 index 0000000000..3401d1f23b --- /dev/null +++ b/bin/sync_lambda_envs.sh @@ -0,0 +1,43 @@ +#!/bin/sh + +# This script will retrieve notification environment variables from AWS parameter store +# Since lambda & k8s environments have some variance, this script will remove any environment +# variable that is already set when run within the lambda runtime environment + +TMP_ENV_FILE="/tmp/.env" + +var_expand() { + if [ -z "${1-}" ] || [ $# -ne 1 ]; then + printf 'var_expand: expected one argument\n' >&2; + return 1; + fi + eval printf '%s' "\"\${$1?}\"" 2> /dev/null # Variable double substitution to be able to check for variable +} + +load_non_existing_envs() { + local envFile=${1:-.env} + local isComment='^[[:space:]]*#' + local isBlank='^[[:space:]]*$' + while IFS= read -r line; do + if echo $line | grep -Eq $isComment; then # Ignore comment line + continue + fi + if echo $line | grep -Eq $isBlank; then # Ignore blank line + continue + fi + key=$(echo "$line" | cut -d '=' -f 1) + value=$(echo "$line" | cut -d '=' -f 2-) + + if [ -z $(var_expand $key) ]; then # Check if environment variable doesn't exist + export "${key}=${value}" + fi + + done < $TMP_ENV_FILE +} + +if [ ! 
-f "$TMP_ENV_FILE" ]; then # Only setup envs once per lambda lifecycle + echo "Retrieving environment parameters" + aws ssm get-parameters --region ca-central-1 --with-decryption --names ENVIRONMENT_VARIABLES --query 'Parameters[*].Value' --output text > "$TMP_ENV_FILE" +fi + +load_non_existing_envs diff --git a/catalog-info.yaml b/catalog-info.yaml new file mode 100644 index 0000000000..5d62d960c8 --- /dev/null +++ b/catalog-info.yaml @@ -0,0 +1,18 @@ +# Metadata for the backstage catalog accessible at this link: +# https://backstage.cdssandbox.xyz/ +--- +apiVersion: backstage.io/v1alpha1 +kind: Component +metadata: + name: notification-api-service + title: GC Notify API | GC Notification API + description: REST API service for GC Notification + annotations: + github.com/project-slug: cds-snc/notification-api + labels: + license: MIT +spec: + type: service + lifecycle: production + owner: group:cds-snc/notify-dev + system: gc-notification diff --git a/ci/Dockerfile b/ci/Dockerfile index 18837feac6..144e6bb07f 100644 --- a/ci/Dockerfile +++ b/ci/Dockerfile @@ -1,27 +1,36 @@ -FROM python:3.9-alpine3.13 +FROM python:3.10-alpine3.16@sha256:afe68972cc00883d70b3760ee0ffbb7375cf09706c122dda7063ffe64c5be21b ENV PYTHONDONTWRITEBYTECODE 1 +ENV APP_VENV="/app/.venv" +ENV POETRY_HOME="/opt/poetry" +ENV POETRY_VERSION="1.7.1" +ENV POETRY_VIRTUALENVS_CREATE="false" +ENV PATH="${APP_VENV}/bin:${POETRY_HOME}/bin:$PATH" RUN apk add --no-cache bash build-base git gcc musl-dev postgresql-dev g++ make libffi-dev libmagic libcurl curl-dev rust cargo && rm -rf /var/cache/apk/* -# update pip -RUN python -m pip install wheel -RUN python -m pip install --upgrade pip - RUN set -ex && mkdir /app - WORKDIR /app -COPY requirements.txt /app -RUN set -ex && pip3 install -r requirements.txt +# Install poetry and isolate it in it's own venv +RUN python -m venv ${POETRY_HOME} \ + && ${POETRY_HOME}/bin/pip3 install poetry==${POETRY_VERSION} + +COPY pyproject.toml poetry.lock /app/ + +RUN python -m venv ${APP_VENV} \ + && . ${APP_VENV}/bin/activate \ + && poetry install \ + && poetry add wheel -COPY . /app +COPY . /app/ -RUN make generate-version-file +RUN . 
${APP_VENV}/bin/activate \ + && make generate-version-file ENV PORT=6011 ARG GIT_SHA ENV GIT_SHA ${GIT_SHA} -CMD ["sh", "-c", "gunicorn -c gunicorn_config.py application"] +CMD ["sh", "-c", "gunicorn -c gunicorn_config.py application"] \ No newline at end of file diff --git a/ci/Dockerfile.lambda b/ci/Dockerfile.lambda index 5a5979c487..c827b08c24 100644 --- a/ci/Dockerfile.lambda +++ b/ci/Dockerfile.lambda @@ -1,24 +1,34 @@ -FROM python:3.9-alpine3.13 +FROM python:3.10-alpine3.16@sha256:afe68972cc00883d70b3760ee0ffbb7375cf09706c122dda7063ffe64c5be21b +ENV PYTHONPATH "${PYTHONPATH}:/opt/python/lib/python3.10/site-packages" ENV PYTHONDONTWRITEBYTECODE 1 +ENV TASK_ROOT /app +ENV APP_VENV="${TASK_ROOT}/.venv" +ENV POETRY_HOME="/opt/poetry" +ENV POETRY_VERSION="1.7.1" +ENV POETRY_VIRTUALENVS_CREATE="false" +ENV PATH="${APP_VENV}/bin:${POETRY_HOME}/bin:$PATH" -RUN apk add --no-cache bash build-base git libtool cmake autoconf automake gcc musl-dev postgresql-dev g++ libexecinfo-dev make libffi-dev libmagic libcurl curl-dev rust cargo && rm -rf /var/cache/apk/* +RUN apk add --no-cache bash build-base git libtool cmake autoconf automake gcc musl-dev postgresql-dev g++ libc6-compat libexecinfo-dev make libffi-dev libmagic libcurl curl-dev rust cargo && rm -rf /var/cache/apk/* -# update pip -RUN python -m pip install wheel -RUN python -m pip install --upgrade pip +RUN mkdir -p ${TASK_ROOT} +WORKDIR ${TASK_ROOT} -RUN set -ex && mkdir /app +# Install poetry and isolate it in its own venv +RUN python -m venv ${POETRY_HOME} \ + && ${POETRY_HOME}/bin/pip3 install poetry==${POETRY_VERSION} -WORKDIR /app +COPY pyproject.toml poetry.lock ${TASK_ROOT}/ -COPY requirements.txt /app -RUN set -ex && pip3 install -r requirements.txt -RUN pip3 install awslambdaric +RUN python -m venv ${APP_VENV} \ + && . ${APP_VENV}/bin/activate \ + && poetry install \ + && poetry add awslambdaric newrelic-lambda wheel -COPY . /app +COPY . ${TASK_ROOT}/ -RUN make generate-version-file +RUN . 
${APP_VENV}/bin/activate \ + && make generate-version-file ENV PORT=6011 @@ -28,7 +38,14 @@ ENV GIT_SHA ${GIT_SHA} # (Optional) Add Lambda Runtime Interface Emulator and use a script in the ENTRYPOINT for simpler local runs ADD https://github.com/aws/aws-lambda-runtime-interface-emulator/releases/latest/download/aws-lambda-rie /usr/bin/aws-lambda-rie COPY bin/entry.sh / -RUN chmod 755 /usr/bin/aws-lambda-rie /entry.sh +COPY bin/sync_lambda_envs.sh / +RUN chmod 755 /usr/bin/aws-lambda-rie /entry.sh /sync_lambda_envs.sh + +# New Relic lambda layer +RUN unzip newrelic-layer.zip -d /opt && rm newrelic-layer.zip ENTRYPOINT [ "/entry.sh" ] -CMD [ "application.handler" ] \ No newline at end of file + +# Launch the New Relic lambda wrapper which will then launch the app +# handler defined in the NEW_RELIC_LAMBDA_HANDLER environment variable +CMD [ "newrelic_lambda_wrapper.handler" ] \ No newline at end of file diff --git a/ci/Dockerfile.test b/ci/Dockerfile.test index 53354c7c51..3a5874db57 100644 --- a/ci/Dockerfile.test +++ b/ci/Dockerfile.test @@ -1,25 +1,30 @@ # Heavily inspired from Dockerfile, this one also install requirements_for_test.txt -FROM python:3.9-alpine +FROM python:3.10-alpine@sha256:7edffe5acc6a2c4c009fece2fbdc85f04fde4c8481202473b880ef3f8fbb2939 ENV PYTHONDONTWRITEBYTECODE 1 +ENV POETRY_VERSION "1.7.1" +ARG APP_VENV="/app/.venv" +ARG POETRY_HOME="/opt/poetry" +ARG POETRY_VERSION="1.7.1" +ARG POETRY_VIRTUALENVS_CREATE="false" +ENV PATH="${APP_VENV}/bin:${POETRY_HOME}/bin:$PATH" RUN apk add --no-cache bash build-base git gcc musl-dev postgresql-dev g++ make libffi-dev libmagic libcurl curl-dev && rm -rf /var/cache/apk/* -# update pip -RUN python -m pip install wheel - RUN set -ex && mkdir /app - WORKDIR /app -COPY requirements.txt /app -COPY requirements_for_test.txt /app +# Install Poetry and isolate it from the project +RUN python -m venv ${POETRY_HOME} \ + && ${POETRY_HOME}/bin/pip3 install poetry==${POETRY_VERSION} -RUN set -ex && pip3 install -r requirements.txt -RUN set -ex && pip3 install -r requirements_for_test.txt +COPY . /app/ -COPY . /app +RUN python -m venv ${APP_VENV} \ + && . 
${APP_VENV}/bin/activate \ + && poetry install \ + && poetry add wheel RUN make generate-version-file @@ -28,4 +33,4 @@ ENV PORT=6011 ARG GIT_SHA ENV GIT_SHA ${GIT_SHA} -CMD ["sh", "-c", "gunicorn -c gunicorn_config.py application"] +CMD ["sh", "-c", "poetry run gunicorn -c gunicorn_config.py application"] diff --git a/docker-compose.yml b/docker-compose.yml index bb5becca55..443727d0c6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,7 +2,7 @@ version: '3' services: db: - image: postgres:11.2 + image: postgres:11.16@sha256:5d2aa4a7b5f9bdadeddcf87cf7f90a176737a02a30d917de4ab2e6a329bd2d45 volumes: - ./local/initdb:/docker-entrypoint-initdb.d environment: @@ -18,7 +18,7 @@ services: - "listen_addresses=*" restart: always redis: - image: redis:latest + image: redis:6.2@sha256:d4948d011cc38e94f0aafb8f9a60309bd93034e07d10e0767af534512cf012a9 web: image: notification-api restart: always @@ -30,11 +30,11 @@ services: - SQLALCHEMY_DATABASE_URI=postgres://postgres:chummy@db:5432/notification_api entrypoint: local/scripts/notify-web-entrypoint.sh command: > - bash -c "make generate-version-file && flask run -p 6011 --host=0.0.0.0" + bash -c "make generate-version-file && make run" volumes: - .:/app ports: - - "6011:6011" + - "6011:6011" depends_on: - db - redis diff --git a/gunicorn_config.py b/gunicorn_config.py index 18b5593ab0..e43a7cb288 100644 --- a/gunicorn_config.py +++ b/gunicorn_config.py @@ -2,17 +2,20 @@ import sys import traceback +import gunicorn # type: ignore import newrelic.agent # See https://bit.ly/2xBVKBH newrelic.agent.initialize() # noqa: E402 workers = 4 -worker_class = "eventlet" +worker_class = "gevent" worker_connections = 256 bind = "0.0.0.0:{}".format(os.getenv("PORT")) accesslog = "-" +# Gunicorn sets the server type on our app. We don't want to show it in the header in the response. +gunicorn.SERVER = "Undisclosed" -on_aws = os.environ.get("NOTIFY_ENVIRONMENT", "") in ["production", "staging"] +on_aws = os.environ.get("NOTIFY_ENVIRONMENT", "") in ["production", "staging", "scratch", "dev"] if on_aws: # To avoid load balancers reporting errors on shutdown instances, see AWS doc # > We also recommend that you configure the idle timeout of your application diff --git a/heartbeat/Dockerfile b/heartbeat/Dockerfile deleted file mode 100644 index 4da92b4169..0000000000 --- a/heartbeat/Dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -FROM public.ecr.aws/lambda/python:3.9 - -ENV PYTHONDONTWRITEBYTECODE 1 - -# Install the function's dependencies -COPY heartbeat/requirements_for_heartbeat.txt ${LAMBDA_TASK_ROOT} -RUN python -m pip install -r requirements_for_heartbeat.txt - -# Copy function code -COPY heartbeat/heartbeat.py ${LAMBDA_TASK_ROOT} - -CMD [ "heartbeat.handler" ] diff --git a/heartbeat/heartbeat.py b/heartbeat/heartbeat.py deleted file mode 100644 index ae936e6f02..0000000000 --- a/heartbeat/heartbeat.py +++ /dev/null @@ -1,33 +0,0 @@ -""" -Code to keep the lambda function alive. -""" -import ast -import os -import uuid -from typing import List - -from notifications_python_client.errors import HTTPError -from notifications_python_client.notifications import NotificationsAPIClient - -API_KEY: str = os.getenv("heartbeat_api_key", "") -# As we can't pass in a list to env var, we pass a str and convert it. 
-BASE_URL: List[str] = ast.literal_eval(os.getenv("heartbeat_base_url")) # type: ignore -EMAIL_ADDRESS = "success@simulator.amazonses.com" -TEMPLATE_ID: uuid.UUID = os.getenv("heartbeat_template_id") # type: ignore - - -def handler(event, context): - if not BASE_URL: - print("Variable BASE_URL is missing") - if not API_KEY: - print("Variable API_KEY is missing") - if not TEMPLATE_ID: - print("Variable TEMPLATE_ID is missing") - for base_url in BASE_URL: - notifications_client = NotificationsAPIClient(API_KEY, base_url=base_url) - try: - notifications_client.send_email_notification(email_address=EMAIL_ADDRESS, template_id=TEMPLATE_ID) - print("Email has been sent by {}!".format(base_url)) - except HTTPError as e: - print(f"Could not send heartbeat: status={e.status_code}, msg={e.message}") - raise diff --git a/heartbeat/requirements_for_heartbeat.txt b/heartbeat/requirements_for_heartbeat.txt deleted file mode 100644 index 0ed0a7d5ce..0000000000 --- a/heartbeat/requirements_for_heartbeat.txt +++ /dev/null @@ -1,6 +0,0 @@ -# pyup: ignore file -# This file is autogenerated. Do not edit it manually. -# Run `make freeze-requirements` to update requirements.txt -# with package version changes made in requirements-app.txt - -notifications-python-client==6.0.2 diff --git a/local/Dockerfile b/local/Dockerfile index a368676636..8c0e128f7a 100644 --- a/local/Dockerfile +++ b/local/Dockerfile @@ -1,24 +1,24 @@ -FROM python:3.9-alpine +FROM python:3.10-alpine@sha256:7edffe5acc6a2c4c009fece2fbdc85f04fde4c8481202473b880ef3f8fbb2939 ENV PYTHONDONTWRITEBYTECODE 1 +ENV POETRY_VERSION "1.7.1" RUN apk add --no-cache bash build-base git gcc musl-dev postgresql-dev g++ make libffi-dev libmagic libcurl curl-dev && rm -rf /var/cache/apk/* # update pip -RUN python -m pip install wheel +RUN python -m pip install wheel poetry==${POETRY_VERSION} RUN set -ex && mkdir /app WORKDIR /app -COPY requirements.txt /app -RUN set -ex && pip3 install -r requirements.txt - COPY . 
/app +RUN poetry install + ENV PORT=6011 ARG GIT_SHA ENV GIT_SHA ${GIT_SHA} -CMD ["sh", "-c", "gunicorn -c gunicorn_config.py application"] \ No newline at end of file +CMD ["sh", "-c", "poetry run gunicorn -c gunicorn_config.py application"] \ No newline at end of file diff --git a/migrations/versions/0011_ad_provider_details.py b/migrations/versions/0011_ad_provider_details.py index bf6c0f4493..d288eb5cc2 100644 --- a/migrations/versions/0011_ad_provider_details.py +++ b/migrations/versions/0011_ad_provider_details.py @@ -104,7 +104,6 @@ def upgrade(): def downgrade(): - op.drop_index(op.f("ix_provider_statistics_provider_id"), table_name="provider_statistics") op.drop_column("provider_statistics", "provider_id") op.drop_index(op.f("ix_provider_rates_provider_id"), table_name="provider_rates") diff --git a/migrations/versions/0012_complete_provider_details.py b/migrations/versions/0012_complete_provider_details.py index 82f617e7cc..69e4f11aa6 100644 --- a/migrations/versions/0012_complete_provider_details.py +++ b/migrations/versions/0012_complete_provider_details.py @@ -17,7 +17,6 @@ def upgrade(): - op.alter_column("provider_rates", "provider_id", existing_type=postgresql.UUID(), nullable=False) op.drop_column("provider_rates", "provider") op.alter_column( @@ -31,7 +30,6 @@ def upgrade(): def downgrade(): - provider_enum = ENUM( "loadtesting", "firetext", diff --git a/migrations/versions/0013_add_loadtest_client.py b/migrations/versions/0013_add_loadtest_client.py index 5f86f56903..62f3c6ae0c 100644 --- a/migrations/versions/0013_add_loadtest_client.py +++ b/migrations/versions/0013_add_loadtest_client.py @@ -18,7 +18,6 @@ def upgrade(): - op.execute( "INSERT INTO provider_details (id, display_name, identifier, priority, notification_type, active) values ('{}', 'Loadtesting', 'loadtesting', 30, 'sms', true)".format( str(uuid.uuid4()) diff --git a/migrations/versions/0110_monthly_billing.py b/migrations/versions/0110_monthly_billing.py index f8efe48cc9..43bf0b66f0 100644 --- a/migrations/versions/0110_monthly_billing.py +++ b/migrations/versions/0110_monthly_billing.py @@ -16,7 +16,6 @@ def upgrade(): - op.create_table( "monthly_billing", sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), diff --git a/migrations/versions/0140_sms_prefix_non_nullable.py b/migrations/versions/0140_sms_prefix_non_nullable.py index fba8dcdce6..899d4ca31c 100644 --- a/migrations/versions/0140_sms_prefix_non_nullable.py +++ b/migrations/versions/0140_sms_prefix_non_nullable.py @@ -15,7 +15,6 @@ def upgrade(): - op.execute( """ update services @@ -35,7 +34,6 @@ def upgrade(): def downgrade(): - op.alter_column( "services", "prefix_sms", diff --git a/migrations/versions/0215_email_brand_type.py b/migrations/versions/0215_email_brand_type.py index 553335182a..b4bef4b61f 100644 --- a/migrations/versions/0215_email_brand_type.py +++ b/migrations/versions/0215_email_brand_type.py @@ -13,7 +13,6 @@ def upgrade(): - op.add_column("email_branding", sa.Column("brand_type", sa.String(length=255), nullable=True)) op.create_index( op.f("ix_email_branding_brand_type"), diff --git a/migrations/versions/0221_nullable_service_branding.py b/migrations/versions/0221_nullable_service_branding.py index 5e121b9e75..cb1a60cee2 100644 --- a/migrations/versions/0221_nullable_service_branding.py +++ b/migrations/versions/0221_nullable_service_branding.py @@ -12,7 +12,6 @@ def upgrade(): - op.drop_constraint("services_branding_fkey", "services", type_="foreignkey") op.drop_index("ix_services_history_branding", 
table_name="services_history") @@ -47,7 +46,6 @@ def upgrade(): def downgrade(): - op.create_index(op.f("ix_services_branding"), "services", ["branding"], unique=False) op.create_index( op.f("ix_services_history_branding"), diff --git a/migrations/versions/0222_drop_service_branding.py b/migrations/versions/0222_drop_service_branding.py index fbcada6a16..ae08e2c0b5 100644 --- a/migrations/versions/0222_drop_service_branding.py +++ b/migrations/versions/0222_drop_service_branding.py @@ -11,12 +11,10 @@ def upgrade(): - op.drop_column("services_history", "branding") op.drop_column("services", "branding") def downgrade(): - op.add_column("services", sa.Column("branding", sa.String(length=255))) op.add_column("services_history", sa.Column("branding", sa.String(length=255))) diff --git a/migrations/versions/0223_add_domain_constraint.py b/migrations/versions/0223_add_domain_constraint.py index fb2472c4f3..783c3fd653 100644 --- a/migrations/versions/0223_add_domain_constraint.py +++ b/migrations/versions/0223_add_domain_constraint.py @@ -10,7 +10,6 @@ def upgrade(): - op.execute( """ update @@ -25,5 +24,4 @@ def upgrade(): def downgrade(): - op.drop_constraint("uq_email_branding_domain", "email_branding") diff --git a/migrations/versions/0300d_update_invite_email.py b/migrations/versions/0300d_update_invite_email.py index 0a53031c5e..67ec71d4f2 100644 --- a/migrations/versions/0300d_update_invite_email.py +++ b/migrations/versions/0300d_update_invite_email.py @@ -15,7 +15,6 @@ def upgrade(): - op.execute( """ UPDATE diff --git a/migrations/versions/0325_set_transaction_timeout.py b/migrations/versions/0325_set_transaction_timeout.py index 99f3e39d93..182237e349 100644 --- a/migrations/versions/0325_set_transaction_timeout.py +++ b/migrations/versions/0325_set_transaction_timeout.py @@ -12,11 +12,12 @@ user = "postgres" timeout = 1200 # in seconds, i.e. 20 minutes +database_name = op.get_bind().engine.url.database # database name that the migration is being run on def upgrade(): - op.execute(f"ALTER ROLE {user} SET statement_timeout = '{timeout}s'") + op.execute(f"ALTER ROLE {user} IN DATABASE {database_name} SET statement_timeout = '{timeout}s'") def downgrade(): - op.execute(f"ALTER ROLE {user} RESET statement_timeout") + op.execute(f"ALTER ROLE {user} IN DATABASE {database_name} RESET statement_timeout") diff --git a/migrations/versions/0326_add_queue_notifications.py b/migrations/versions/0326_add_queue_notifications.py new file mode 100644 index 0000000000..4bd2908528 --- /dev/null +++ b/migrations/versions/0326_add_queue_notifications.py @@ -0,0 +1,26 @@ +""" + +Revision ID: 0326_add_queue_notifications +Revises: 0325_set_transaction_timeout +Create Date: 2021-07-29 17:30:00 + +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "0326_add_queue_notifications" +down_revision = "0325_set_transaction_timeout" + +user = "postgres" +timeout = 1200 # in seconds, i.e. 
20 minutes + + +def upgrade(): + op.add_column("notifications", sa.Column("queue_name", sa.Text(), nullable=True)) + op.add_column("notification_history", sa.Column("queue_name", sa.Text(), nullable=True)) + + +def downgrade(): + op.drop_column("notifications", "queue_name") + op.drop_column("notification_history", "queue_name") diff --git a/migrations/versions/0327_add_password_expired.py b/migrations/versions/0327_add_password_expired.py new file mode 100644 index 0000000000..89407f3edf --- /dev/null +++ b/migrations/versions/0327_add_password_expired.py @@ -0,0 +1,25 @@ +""" + +Revision ID: 0327_add_password_expired +Revises: 0326_add_queue_notifications +Create Date: 2022-04-06 13:00:00 + +""" +import sqlalchemy as sa +from alembic import op + +revision = "0327_add_password_expired" +down_revision = "0326_add_queue_notifications" + +user = "postgres" +timeout = 1200 # in seconds, i.e. 20 minutes + + +def upgrade(): + op.add_column("users", sa.Column("password_expired", sa.Boolean(), nullable=True, server_default=sa.false())) + op.execute("UPDATE users SET password_expired = false") + op.alter_column("users", "password_expired", nullable=False) + + +def downgrade(): + op.drop_column("users", "password_expired") diff --git a/migrations/versions/0419_add_forced_pass_template.py b/migrations/versions/0419_add_forced_pass_template.py new file mode 100644 index 0000000000..2af9995df5 --- /dev/null +++ b/migrations/versions/0419_add_forced_pass_template.py @@ -0,0 +1,84 @@ +"""empty message + +Revision ID: 0419_add_forced_pass_template +Revises: 0327_add_password_expired +Create Date: 2022-04-19 13:00:00 + +""" + +import uuid + +# revision identifiers, used by Alembic. +from datetime import datetime + +from alembic import op + +from app.encryption import hashpw + +revision = "0419_add_forced_pass_template" +down_revision = "0327_add_password_expired" + + +user_id = "6af522d0-2915-4e52-83a3-3690455a5fe6" +service_id = "d6aa2c68-a2d9-4437-ab19-3ae8eb202553" + + +def upgrade(): + op.get_bind() + template_insert = """INSERT INTO templates (id, name, template_type, created_at, + content, archived, service_id, subject, created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', False) + """ + + template_history_insert = """INSERT INTO templates_history (id, name, template_type, created_at, + content, archived, service_id, + subject, created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, 'normal', False) + """ + + password_reset_content = """ + Hi ((Name)), + To reset your password, click this link: + [Password reset](((url))?lang=en) + This is your unique link. Do not share this link with anyone. + If you didn’t request this email, please [contact us](https://notification.canada.ca/contact?lang=en). + ___ + Bonjour ((Name)), + Pour réinitialiser votre mot de passe, veuillez cliquer sur le lien suivant : + [Réinitialisation de votre mot de passe](((url))?lang=fr) + Ce lien est unique. Ne le transmettez à personne. + Si vous n’avez pas demandé ce courriel, veuillez [nous contacter](https://notification.canada.ca/contact?lang=fr). 
+ """ + + op.execute( + template_history_insert.format( + "e9a65a6b-497b-42f2-8f43-1736e43e13b3", + "Notify forced-password reset email", + "email", + datetime.utcnow(), + password_reset_content, + service_id, + "Force reset your Notify password", + user_id, + ) + ) + + op.execute( + template_insert.format( + "e9a65a6b-497b-42f2-8f43-1736e43e13b3", + "Notify forced-password reset email", + "email", + datetime.utcnow(), + password_reset_content, + service_id, + "Force reset your Notify password", + user_id, + "normal", + ) + ) + + +def downgrade(): + op.get_bind() + op.execute("delete from templates where id = '{}'".format("e9a65a6b-497b-42f2-8f43-1736e43e13b3")) + op.execute("delete from templates_history where id = '{}'".format("e9a65a6b-497b-42f2-8f43-1736e43e13b3")) diff --git a/migrations/versions/0420_add_redacted_template.py b/migrations/versions/0420_add_redacted_template.py new file mode 100644 index 0000000000..5039e43a74 --- /dev/null +++ b/migrations/versions/0420_add_redacted_template.py @@ -0,0 +1,41 @@ +"""empty message + +Revision ID: 0420_add_redacted_template +Revises: 0419_add_forced_pass_template +Create Date: 2022-04-19 13:00:00 + +""" + +import uuid + +# revision identifiers, used by Alembic. +from datetime import datetime + +from alembic import op + +revision = "0420_add_redacted_template" +down_revision = "0419_add_forced_pass_template" + +user_id = "6af522d0-2915-4e52-83a3-3690455a5fe6" +service_id = "d6aa2c68-a2d9-4437-ab19-3ae8eb202553" + + +def upgrade(): + op.get_bind() + + redacted_template_insert = """INSERT INTO template_redacted(template_id, redact_personalisation, updated_at, updated_by_id) + VALUES ('{}', False, '{}', '{}') + """ + + op.execute( + redacted_template_insert.format( + "e9a65a6b-497b-42f2-8f43-1736e43e13b3", + datetime.utcnow(), + user_id, + ) + ) + + +def downgrade(): + op.get_bind() + op.execute("delete from template_redacted where template_id = '{}'".format("e9a65a6b-497b-42f2-8f43-1736e43e13b3")) diff --git a/migrations/versions/0421_add_sms_daily_limit.py b/migrations/versions/0421_add_sms_daily_limit.py new file mode 100644 index 0000000000..04be5683a3 --- /dev/null +++ b/migrations/versions/0421_add_sms_daily_limit.py @@ -0,0 +1,31 @@ +""" + +Revision ID: 0421_add_sms_daily_limit +Revises: 0420_add_redacted_template +Create Date: 2022-09-02 16:00:00 + +""" +import sqlalchemy as sa +from alembic import op + +revision = "0421_add_sms_daily_limit" +down_revision = "0420_add_redacted_template" + +user = "postgres" +timeout = 1200 # in seconds, i.e. 
20 minutes +default = 1000 + + +def upgrade(): + op.add_column("services", sa.Column("sms_daily_limit", sa.BigInteger(), nullable=True)) + op.execute(f"UPDATE services SET sms_daily_limit = {default}") + op.alter_column("services", "sms_daily_limit", nullable=False) + + op.add_column("services_history", sa.Column("sms_daily_limit", sa.BigInteger(), nullable=True)) + op.execute(f"UPDATE services_history SET sms_daily_limit = {default}") + op.alter_column("services_history", "sms_daily_limit", nullable=False) + + +def downgrade(): + op.drop_column("services", "sms_daily_limit") + op.drop_column("services_history", "sms_daily_limit") diff --git a/migrations/versions/0422_add_billable_units.py b/migrations/versions/0422_add_billable_units.py new file mode 100644 index 0000000000..219534d566 --- /dev/null +++ b/migrations/versions/0422_add_billable_units.py @@ -0,0 +1,29 @@ +""" + +Revision ID: 0422_add_billable_units +Revises: 0421_add_sms_daily_limit +Create Date: 2022-09-082 15:45:00 + +""" +import sqlalchemy as sa +from alembic import op + +revision = "0422_add_billable_units" +down_revision = "0421_add_sms_daily_limit" + +user = "postgres" +timeout = 1200 # in seconds, i.e. 20 minutes +default = 1 + + +def upgrade(): + op.add_column( + "ft_notification_status", + sa.Column("billable_units", sa.Integer(), nullable=True), + ) + op.execute(f"UPDATE ft_notification_status SET billable_units = notification_count") + op.alter_column("ft_notification_status", "billable_units", nullable=False) + + +def downgrade(): + op.drop_column("ft_notification_status", "billable_units") diff --git a/migrations/versions/0423_daily_sms_limit_updated.py b/migrations/versions/0423_daily_sms_limit_updated.py new file mode 100644 index 0000000000..d1019d0a8d --- /dev/null +++ b/migrations/versions/0423_daily_sms_limit_updated.py @@ -0,0 +1,154 @@ +""" + +Revision ID: 0423_daily_sms_limit_updated +Revises: 0422_add_billable_units +Create Date: 2022-09-21 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0423_daily_sms_limit_updated" +down_revision = "0422_add_billable_units" + +near_sms_limit_template_id = current_app.config["NEAR_DAILY_SMS_LIMIT_TEMPLATE_ID"] +at_sms_limit_template_id = current_app.config["REACHED_DAILY_SMS_LIMIT_TEMPLATE_ID"] +daily_sms_limit_updated_id = current_app.config["DAILY_SMS_LIMIT_UPDATED_TEMPLATE_ID"] + +template_ids = [near_sms_limit_template_id, at_sms_limit_template_id, daily_sms_limit_updated_id] + + +def upgrade(): + template_insert = """ + INSERT INTO templates (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', false) + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', false) + """ + + near_sms_limit_content = "\n".join( + [ + "Hello ((name)),", + "", + "((service_name)) just reached 80% of its daily SMS limit of ((message_limit_en)) messages. 
Your service will be blocked from sending SMS if you go above the daily limit by the end of the day.", + "", + "You can request a limit increase by [contacting us](((contact_url))).", + "", + "The GC Notify team", + "", + "___", + "", + "Bonjour ((name)),", + "", + "((service_name)) vient d’atteindre 80% de sa limite quotidienne de ((message_limit_fr)) messages. Votre service ne pourra plus envoyer de messages si vous allez au-delà de votre limite d’ici la fin de journée.", + "", + "Vous pouvez demander à augmenter cette limite en [nous contactant](((contact_url))).", + "", + "L’équipe GC Notification", + ] + ) + + reached_sms_limit_content = "\n".join( + [ + "Hello ((name)),", + "", + "((service_name)) has reached its daily SMS limit of ((message_limit_en)) messages. Your service has been blocked from sending SMS messages until tomorrow.", + "", + "You can request a limit increase by [contacting us](((contact_url))).", + "", + "The GC Notify team", + "", + "___", + "", + "Bonjour ((name)),", + "", + "((service_name)) vient d’atteindre sa limite quotidienne de ((message_limit_fr)) messages. Votre service ne peut plus envoyer de messages jusqu’à demain.", + "", + "Vous pouvez demander à augmenter cette limite en [nous contactant](((contact_url))).", + "", + "L’équipe GC Notification", + ] + ) + + daily_sms_limit_updated_content = "\n".join( + [ + "Hello ((name)),", + "", + "The daily SMS limit of ((service_name)) has just been updated. You can now send ((message_limit_en)) SMS messages per day. This new limit is effective now.", + "", + "The GC Notify team", + "", + "___", + "", + "Bonjour ((name)),", + "", + "La limite quotidienne de ((service_name)) a été mise à jour. Vous pouvez désormais envoyer ((message_limit_fr)) messages par jour. Ce changement est effectif dès maintenant.", + "", + "L’équipe GC Notification", + ] + ) + + templates = [ + { + "id": near_sms_limit_template_id, + "name": "Near daily SMS limit", + "subject": "Action required: 80% of daily SMS sending limit reached for ((service_name)) | Action requise: 80% de la limite d’envoi quotidienne atteinte pour ((service_name))", + "content": near_sms_limit_content, + }, + { + "id": at_sms_limit_template_id, + "name": "Daily SMS limit reached", + "subject": "Action required: Daily SMS sending limit reached for ((service_name)) | Action requise: Limite d’envoi quotidienne atteinte pour ((service_name)) )", + "content": reached_sms_limit_content, + }, + { + "id": daily_sms_limit_updated_id, + "name": "Daily SMS limit updated", + "subject": "Daily SMS sending limit updated for ((service_name)) | Limite d’envoi quotidienne mise à jour pour ((service_name))", + "content": daily_sms_limit_updated_content, + }, + ] + + for template in templates: + op.execute( + template_insert.format( + template["id"], + template["name"], + "email", + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + "normal", + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + "email", + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + "normal", + ) + ) + + +def downgrade(): + for template_id in template_ids: + op.execute("DELETE FROM notifications WHERE template_id = '{}'".format(template_id)) + op.execute("DELETE FROM notification_history WHERE template_id = '{}'".format(template_id)) + op.execute("DELETE FROM 
template_redacted WHERE template_id = '{}'".format(template_id)) + op.execute("DELETE FROM templates_history WHERE id = '{}'".format(template_id)) + op.execute("DELETE FROM templates WHERE id = '{}'".format(template_id)) diff --git a/migrations/versions/0424_sms_templates_in_redacted.py b/migrations/versions/0424_sms_templates_in_redacted.py new file mode 100644 index 0000000000..ca8b53c449 --- /dev/null +++ b/migrations/versions/0424_sms_templates_in_redacted.py @@ -0,0 +1,42 @@ +""" + +Revision ID: 0424_sms_templates_in_redacted +Revises: 0423_daily_sms_limit_updated +Create Date: 2022-10-13 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0424_sms_templates_in_redacted" +down_revision = "0423_daily_sms_limit_updated" + +near_sms_limit_template_id = current_app.config["NEAR_DAILY_SMS_LIMIT_TEMPLATE_ID"] +at_sms_limit_template_id = current_app.config["REACHED_DAILY_SMS_LIMIT_TEMPLATE_ID"] +daily_sms_limit_updated_id = current_app.config["DAILY_SMS_LIMIT_UPDATED_TEMPLATE_ID"] + +template_ids = [near_sms_limit_template_id, at_sms_limit_template_id, daily_sms_limit_updated_id] + + +def upgrade(): + for template_id in template_ids: + op.execute( + """ + INSERT INTO template_redacted + ( + template_id, + redact_personalisation, + updated_at, + updated_by_id + ) VALUES ( '{}', false, current_timestamp, '{}' ) + """.format( + template_id, current_app.config["NOTIFY_USER_ID"] + ) + ) + + +def downgrade(): + for template_id in template_ids: + op.execute("DELETE FROM template_redacted WHERE template_id = '{}'".format(template_id)) diff --git a/migrations/versions/0425_update_system_templates.py b/migrations/versions/0425_update_system_templates.py new file mode 100644 index 0000000000..0595735aaf --- /dev/null +++ b/migrations/versions/0425_update_system_templates.py @@ -0,0 +1,273 @@ +""" + +Revision ID: 0425_update_system_templates +Revises: 0424_sms_templates_in_redacted +Create Date: 2022-09-21 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0425_update_system_templates" +down_revision = "0424_sms_templates_in_redacted" + +templates = [ + { + "id": current_app.config["ALREADY_REGISTERED_EMAIL_TEMPLATE_ID"], + "name": "Your Notify account", + "template_type": "email", + "content": """[[en]]\r\nYou already have a GC Notify account with this email address.\r\n\r\n[Sign in](((signin_url)) ""Sign in"")\r\n\r\nIf you’ve forgotten your password, you can reset it here: [Password reset](((forgot_password_url)) ""Password reset"")\r\n\r\nIf you didn’t try to register for an account recently, please contact us: ((feedback_url))\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\nVous avez déjà un compte Notification GC avec cette adresse courriel.\r\n\r\n[Connectez-vous](((signin_url)) ""Connectez-vous"")\r\n\r\nSi vous avez oublié votre mot de passe, vous pouvez le réinitialiser ici: [Réinitialisation du mot de passe](((Forgot_password_url)) ""Réinitialisation du mot de passe"")\r\n\r\nSi vous n''avez pas essayé de vous connecter à un compte récemment, veuillez communiquer avec nous: ((feedback_url))\r\n[[/fr]]""", + "subject": "Your account | Votre compte", + "process_type": "priority", + }, + { + "id": current_app.config["ORGANISATION_INVITATION_EMAIL_TEMPLATE_ID"], + "name": "Notify organisation invitation email", + "template_type": "email", + "content": """[[en]]\r\n((user_name)) has invited you to collaborate on ((organisation_name)) on GC Notify.\r\n\r\nGC Notify makes it easy 
to keep people updated by helping you send emails and text messages.\r\n\r\nTo create an account on GC Notify, use this link:\r\n((url))\r\n\r\nThis invitation will stop working at midnight tomorrow. This is to keep ((organisation_name)) secure.\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\n((user_name)) vous a invité à collaborer sure ((organisation_name)) dans Notification GC.\r\n\r\nNotification GC facilite la mise à jour des personnes en vous aidant à envoyer des courriels et messages texte.\r\n\r\nUtilisez ce lien pour créer un compte sur Notification GC:\r\n((url))\r\n\r\nCette invitation cessera de fonctionner à minuit demain. Ceci est de garder ((organisation_name)) sécurisé.\r\n[[/fr]]""", + "subject": "((user_name)) has invited you to collaborate | ((user_name)) vous a invité à collaborer", + "process_type": "priority", + }, + { + "id": current_app.config["EMAIL_2FA_TEMPLATE_ID"], + "name": "Notify email verify code", + "template_type": "email", + "content": """[[en]]\r\nHi ((name)),\r\n\r\n((verify_code)) is your security code to log in to GC Notify.\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\nBonjour ((name)),\r\n\r\n((verify_code)) est votre code de sécurité pour vous connecter à Notification GC.\r\n[[/fr]]""", + "subject": "Sign in | Connectez-vous", + "process_type": "priority", + }, + { + "id": current_app.config["SMS_CODE_TEMPLATE_ID"], + "name": "Notify SMS verify code", + "template_type": "sms", + "content": """((verify_code)) is your GC Notify authentication code | ((verify_code)) est votre code d''authentification de Notification GC""", + "subject": "None", + "process_type": "priority", + }, + { + "id": current_app.config["PASSWORD_RESET_TEMPLATE_ID"], + "name": "Notify password reset email", + "template_type": "email", + "content": """[[en]]\r\nHi ((user_name)),\r\n\r\nWe received a request to reset your password on GC Notify.\r\n\r\nIf you didn''t request this email, you can ignore it – your password has not been changed.\r\n\r\nTo reset your password, click this link:\r\n[Password reset](((url)) ""Password reset"")\r\n[[/en]]\r\n\r\n___\r\n\r\n[[fr]]\r\nBonjour ((user_name)),\r\n\r\nNous avons reçu une demande de réinitialisation de votre mot de passe dans Notification GC.\r\n\r\nSi vous n''avez pas demandé ce courriel, vous pouvez l''ignorer - votre mot de passe n''a pas été changé.\r\n\r\nPour réinitialiser votre mot de passe, cliquez sur ce lien:\r\n[Réinitialisation du mot de passe](((url)) ""Réinitialisation du mot de passe"")\r\n[[/fr]]""", + "subject": "Reset your password | Réinitialiser votre mot de passe", + "process_type": "priority", + }, + { + "id": current_app.config["INVITATION_EMAIL_TEMPLATE_ID"], + "name": "Notify invitation email", + "template_type": "email", + "content": """[[en]]\r\n((user_name)) has invited you to collaborate on ((service_name)) on GC Notify.\r\n\r\nGC Notify makes it easy to keep people updated by helping you send emails and text messages.\r\n\r\nTo accept the invitation, use this link:\r\n((url))\r\n\r\nThis invitation will stop working at midnight tomorrow. This is to keep ((service_name)) secure.\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\n((user_name)) vous a invité à collaborer sur ((service_name)) dans Notification GC.\r\n\r\nNotification GC facilite la mise à jour des personnes en vous aidant à envoyer des courriels et des messages texte.\r\n\r\nUtilisez ce lien pour accepter l''invitation :\r\n((url))\r\n\r\nCette invitation cessera de fonctionner à minuit demain. 
C''est pour garder ((service_name)) sécurisé.\r\n[[/fr]]""", + "subject": "((user_name)) invited you to collaborate | ((user_name)) vous a invité à collaborer", + "process_type": "priority", + }, + { + "id": current_app.config["MOU_SIGNER_RECEIPT_TEMPLATE_ID"], + "name": "MOU Signed By Receipt (not in use)", + "template_type": "email", + "content": """[[en]]\r\nHi ((signed_by_name)),\r\n\r\n((org_name)) has accepted the GC Notify data sharing and financial agreement. \r\n\r\nIf you need another copy of the agreement you can download it here: ((mou_link))\r\n\r\nThanks,\r\nThe GC Notify team\r\nhttps://notification.canada.ca\r\n[[/en]]""", + "subject": "You’ve accepted the GC Notify data sharing and financial agreement", + "process_type": "priority", + }, + { + "id": current_app.config["MOU_SIGNED_ON_BEHALF_ON_BEHALF_RECEIPT_TEMPLATE_ID"], + "name": "MOU Signed On Behalf Of Receipt - On Behalf Of (not in use)", + "template_type": "email", + "content": """[[en]]\r\nHi ((on_behalf_of_name)),\r\n\r\n((signed_by_name)) has accepted the GC Notify data sharing and financial agreement on your behalf, for ((org_name)).\r\n\r\nGC Notify lets teams in the public sector send emails and text messages. It’s built and run by a team at the Canadian Digital Service.\r\n\r\nIf you need another copy of the agreement you can download it here: ((mou_link))\r\n\r\nThanks,\r\nThe GC Notify team\r\nhttps://notification.canada.ca \r\n[[/en]]""", + "subject": "((org_name)) has accepted the GC Notify data sharing and financial agreement", + "process_type": "priority", + }, + { + "id": current_app.config["ACCOUNT_CHANGE_TEMPLATE_ID"], + "name": "Account update", + "template_type": "email", + "content": """[[en]]\r\nYour GC Notify user account information was changed on ((base_url)).\r\n\r\nUpdated information: ((change_type_en))\r\n\r\nIf you did not make this change, [contact us](((contact_us_url)) ""contact us"") immediately.\r\n[[/en]]\r\n\r\n___\r\n\r\n[[fr]]\r\nLes renseignements de votre compte d''utilisateur ont été modifiées sur ((base_url)).\r\n\r\nRenseignements mis à jour : ((change_type_fr))\r\n\r\nSi vous n''avez pas effectué ce changement, [communiquez avec nous](((contact_us_url)) ""communiquez avec nous"") immédiatement.\r\n[[/fr]]""", + "subject": "Account information changed | Renseignements de compte modifiés", + "process_type": "priority", + }, + { + "id": current_app.config["NEAR_DAILY_LIMIT_TEMPLATE_ID"], + "name": "Near combined daily limit", + "template_type": "email", + "content": """Hello ((name)),\r\n\r\n((service_name)) just reached 80% of its daily limit of ((message_limit_en)) messages. Your service will be blocked from sending if you go above the daily limit by the end of the day.\r\n\r\nYou can request a limit increase by [contacting us](((contact_url))).\r\n\r\nThe GC Notify team\r\n\r\n___\r\n\r\nBonjour ((name)),\r\n\r\n((service_name)) vient d’atteindre 80% de sa limite quotidienne de ((message_limit_fr)) messages. 
Votre service ne pourra plus envoyer de messages si vous allez au-delà de votre limite d’ici la fin de journée.\r\n\r\nVous pouvez demander à augmenter cette limite en [nous contactant](((contact_url))).\r\n\r\nL’équipe Notification GC""", + "subject": "Action required: 80% of daily sending limit reached for ((service_name)) | Action requise: 80% de la limite d’envoi quotidienne atteinte pour ((service_name))", + "process_type": "normal", + }, + { + "id": current_app.config["SERVICE_NOW_LIVE_TEMPLATE_ID"], + "name": """Automated "You''re now live" message""", + "template_type": "email", + "content": """[[en]]\r\nHello ((name)),\r\n\r\n\r\n((service_name)) is now live on GC Notify.\r\n\r\nYou’re all set to send notifications outside your team.\r\n\r\n\r\nYou can send up to ((message_limit_en)) messages per day.\r\n\r\nIf you ever need to send more messages, [contact us](((contact_us_url)) ""contact us"").\r\n\r\n\r\n[Sign in to GC Notify](((signin_url)) ""Sign in to GC Notify"")\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\nBonjour ((name)),\r\n\r\n\r\n((service_name)) est maintenant activé sur Notification GC.\r\n\r\nVous êtes prêts à envoyer des notifications en dehors de votre équipe.\r\n\r\n\r\nVous pouvez envoyer jusqu’à ((message_limit_fr)) messages par jour.\r\n\r\nSi jamais vous avez besoin d’envoyer plus de messages, [communiquez avec nous](((contact_us_url)) ""communiquez avec nous"").\r\n\r\n\r\n[Connectez-vous à Notification GC](((signin_url)) ""Connectez-vous à Notification GC"")\r\n[[/fr]]""", + "subject": "Your service is now live | Votre service est maintenant activé", + "process_type": "priority", + }, + { + "id": current_app.config["DAILY_SMS_LIMIT_UPDATED_TEMPLATE_ID"], + "name": "Daily SMS limit updated", + "template_type": "email", + "content": """(la version française suit)\r\n\r\nHello ((name)),\r\n\r\nYou can now send ((message_limit_en)) text fragments per day. \r\n\r\nThe GC Notify Team\r\n\r\n___\r\n\r\nBonjour ((name)),\r\n\r\nVous pouvez désormais envoyer ((message_limit_fr)) fragments de message texte par jour. 
\r\n\r\nL’équipe Notification GC""", + "subject": "We’ve updated the daily limit for ((service_name)) | Limite quotidienne d’envoi mise à jour pour ((service_name)).", + "process_type": "priority", + }, + { + "id": current_app.config["BRANDING_REQUEST_TEMPLATE_ID"], + "name": "Support - Branding Request", + "template_type": "email", + "content": """[[en]]\r\nA new logo has been uploaded by ((email)) for the following service:\r\n\r\nService id: ((service_id))\r\nService name: ((service_name))\r\n\r\nLogo filename: ((url))\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\nUn nouveau logo a été téléchargé par ((email)) pour le service suivant :\r\n\r\nIdentifiant du service : ((service_id))\r\nNom du service : ((service_name))\r\n\r\nNom du fichier du logo : ((url))\r\n[[/fr]]""", + "subject": "Branding change request for ((service_name)) | Demande de changement d''image de marque pour ((service_name))", + "process_type": "priority", + }, + { + "id": current_app.config["NO_REPLY_TEMPLATE_ID"], + "name": "No Reply", + "template_type": "email", + "content": """[[en]]\r\nYour message was not delivered.\r\n\r\nThe email address ((sending_email_address)) is not able to receive messages since this feature has not been set by the sender.\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\nVotre message n’a pas été livré.\r\n\r\nL’adresse courriel ((sending_email_address)) ne peut pas recevoir de messages car cette fonction n’a pas été définie par l’expéditeur.\r\n[[/fr]]""", + "subject": "Message not delivered | Message non livré", + "process_type": "normal", + }, + { + "id": current_app.config["TEAM_MEMBER_EDIT_MOBILE_TEMPLATE_ID"], + "name": "Phone number changed by service manager", + "template_type": "sms", + "content": """Your mobile number was changed by ((servicemanagername)). Next time you sign in, your GC Notify authentication code will be sent to this phone. | Votre numéro de téléphone mobile a été modifié par ((servicemanagername)). Lors de votre prochaine connexion, votre code d''authentification de Notification GC sera envoyé à ce téléphone.""", + "subject": "None", + "process_type": "priority", + }, + { + "id": current_app.config["REPLY_TO_EMAIL_ADDRESS_VERIFICATION_TEMPLATE_ID"], + "name": "Verify email reply-to address for a service", + "template_type": "email", + "content": """[[en]]\r\nHi,\r\n\r\nThis address has been provided as a reply-to email address for a GC Notify account.\r\n\r\nAny replies from users to emails they receive through GC Notify will come back to this email address.\r\n\r\nThis is just a quick check to make sure the address is valid.\r\n\r\nNo need to reply.\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\nBonjour,\r\n\r\nCette adresse a été fournie comme adresse courriel de réponse pour un compte Notification GC.\r\n\r\nToute réponse des utilisateurs aux courriel qu''ils reçoivent via Notification GC reviendra à cette adresse courriel.\r\n\r\nCeci est juste une vérification rapide pour vous assurer que cette adresse courriel est valide.\r\n\r\nPas besoin de répondre.\r\n[[/fr]]""", + "subject": "Your reply-to email address | Votre adresse courriel de réponse", + "process_type": "priority", + }, + { + "id": current_app.config["REACHED_DAILY_SMS_LIMIT_TEMPLATE_ID"], + "name": "Daily SMS limit reached", + "template_type": "email", + "content": """(la version française suit)\r\n\r\nHello ((name)),\r\n\r\n((service_name)) has sent ((message_limit_en)) text message fragments today. \r\n\r\nIf a text message is long, it travels in fragments. The fragments assemble into 1 message for the recipient. 
Each fragment counts towards your daily limit.\r\n\r\nThe number of fragments may be higher than the number of recipients. Complex factors determine how messages split into fragments. These factors include character count and type of characters used. \r\n\r\nYou can send more messages tomorrow. \r\n\r\nTo request a limit increase, [contact us](((contact_url))). We’ll respond within 1 business day.\r\n\r\nThe GC Notify team\r\n\r\n___\r\n\r\nBonjour ((name)),\r\n\r\nAujourd’hui, ((service_name)) a envoyé ((message_limit_fr)) fragments de message texte. \r\n\r\nLorsqu’un message texte est long, il se fragmente lors de la transmission. Tous les fragments sont rassemblés pour former un message unique pour le destinataire. Chaque fragment compte dans votre limite quotidienne.\r\n\r\nLe nombre de fragments peut être supérieur au nombre de destinataires. La division des messages en fragments dépend de facteurs complexes, dont le nombre de caractères et le type de caractères utilisés. \r\n\r\nVous pourrez à nouveau envoyer des messages dès demain. \r\n\r\nVeuillez [nous contacter](((contact_url))) si vous désirez augmenter votre limite d’envoi. Nous vous répondrons en un jour ouvrable.\r\n\r\nL’équipe Notification GC""", + "subject": "((service_name)) has reached its daily limit for text fragments | Limite quotidienne d’envoi de fragments de message texte atteinte pour ((service_name)).", + "process_type": "normal", + }, + { + "id": current_app.config["NEAR_DAILY_SMS_LIMIT_TEMPLATE_ID"], + "name": "Near daily SMS limit", + "template_type": "email", + "content": """(la version française suit)\r\n\r\n\r\nHello ((name)),\r\n\r\nIf a text message is long, it travels in fragments. The fragments assemble into 1 message for the recipient. Each fragment counts towards your daily limit.\r\n\r\nThe number of fragments may be higher than the number of recipients. Complex factors determine how messages split into fragments. These factors include character count and type of characters used.\r\n\r\n((service_name)) can send ((message_limit_en)) text fragments per day. You’ll be blocked from sending if you exceed that limit before the end of the day. \r\n\r\nTo request a limit increase, [contact us](((contact_url))). We’ll respond within 1 business day.\r\n\r\nThe GC Notify team\r\n\r\n___\r\n\r\nBonjour ((name)),\r\n\r\nLorsqu’un message texte est long, il se fragmente lors de la transmission. Tous les fragments sont rassemblés pour former un message unique pour le destinataire. Chaque fragment compte dans votre limite quotidienne.\r\n\r\nLe nombre de fragments peut être supérieur au nombre de destinataires. La division des messages en fragments dépend de facteurs complexes, dont le nombre de caractères et le type de caractères utilisés.\r\n\r\n((service_name)) peut envoyer ((message_limit_fr)) fragments de message texte par jour. Si vous atteignez cette limite avant la fin de la journée, vous ne pourrez plus envoyer de messages texte. \r\n\r\nVeuillez [nous contacter](((contact_url))) si vous désirez augmenter votre limite d’envoi. Nous vous répondrons en un jour ouvrable.\r\n\r\nL’équipe Notification GC""", + "subject": "((service_name)) is near its daily limit for text fragments. 
| La limite quotidienne d’envoi de fragments de message texte est presque atteinte pour ((service_name)).", + "process_type": "normal", + }, + { + "id": current_app.config["DAILY_LIMIT_UPDATED_TEMPLATE_ID"], + "name": "Combined daily limit updated", + "template_type": "email", + "content": """Hello ((name)),\r\n\r\nThe daily limit of ((service_name)) has just been updated. You can now send ((message_limit_en)) messages per day. This new limit is effective now.\r\n\r\nThe GC Notify team\r\n\r\n___\r\n\r\nBonjour ((name)),\r\n\r\nLa limite quotidienne de ((service_name)) a été mise à jour. Vous pouvez désormais envoyer ((message_limit_fr)) messages par jour. Ce changement est effectif dès maintenant.\r\n\r\nL’équipe Notification GC""", + "subject": "Daily sending limit updated for ((service_name)) | Limite d’envoi quotidienne mise à jour pour ((service_name))", + "process_type": "normal", + }, + { + "id": current_app.config["MOU_SIGNED_ON_BEHALF_SIGNER_RECEIPT_TEMPLATE_ID"], + "name": "MOU Signed On Behalf Of Receipt - Signed by (not in use)", + "template_type": "email", + "content": """[[en]]\r\nHi ((signed_by_name)),\r\n\r\n((org_name)) has accepted the GC Notify data sharing and financial agreement. We’ve emailed ((on_behalf_of_name)) to let them know.\r\n\r\nIf you need another copy of the agreement you can download it here: ((mou_link))\r\n\r\nThanks,\r\nThe GC Notify team\r\nhttps://notification.canada.ca\r\n[[/en]]""", + "subject": "You’ve accepted the GC Notify data sharing and financial agreement", + "process_type": "priority", + }, + { + "id": current_app.config["TEAM_MEMBER_EDIT_EMAIL_TEMPLATE_ID"], + "name": "Email address changed by service manager", + "template_type": "email", + "content": """[[en]]\r\nHi ((name)),\r\n\r\n((servicemanagername)) changed your GC Notify account email address to:\r\n\r\n((email address))\r\n\r\nYou’ll need to use this email address next time you sign in.\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\nBonjour ((name)),\r\n\r\n((servicemanagername)) a modifié l''adresse courriel de votre compte Notification GC :\r\n\r\n((email address))\r\n\r\nVous devrez utiliser cette adresse courriel lors de votre prochaine connexion.\r\n[[/fr]]""", + "subject": "Your email address has changed | Votre adresse courriel a changé", + "process_type": "priority", + }, + { + "id": current_app.config["MOU_NOTIFY_TEAM_ALERT_TEMPLATE_ID"], + "name": "MOU Signed Notify Team Alert (not in use)", + "template_type": "email", + "content": """[[en]]\r\n((signed_by_name)) accepted the data sharing and financial agreement for ((org_name)).\r\n\r\nSee how ((org_name)) is using GC Notify here: ((org_dashboard_link))\r\n[[/en]]""", + "subject": "Someone signed an MOU for an org on GC Notify", + "process_type": "priority", + }, + { + "id": current_app.config["FORCED_PASSWORD_RESET_TEMPLATE_ID"], + "name": "Notify forced-password reset email", + "template_type": "email", + "content": """Hi ((user_name)),\r\n\r\nTo reset your password, click this link:\r\n\r\n[Password reset](((url))?lang=en)\r\n\r\nThis is your unique link. Do not share this link with anyone.\r\n\r\nIf you didn’t request this email, please [contact us](https://notification.canada.ca/contact?lang=en).\r\n\r\n___\r\n\r\n\r\nBonjour ((user_name)),\r\n\r\nPour réinitialiser votre mot de passe, veuillez cliquer sur le lien suivant :\r\n\r\n[Réinitialisation de votre mot de passe](((url))?lang=fr)\r\n\r\nCe lien est unique. Ne le transmettez à personne. 
\r\n\r\nSi vous n’avez pas demandé ce courriel, veuillez [nous contacter](https://notification.canada.ca/contact?lang=fr).""", + "subject": "Reset your password | Réinitialiser votre mot de passe", + "process_type": "normal", + }, + { + "id": current_app.config["CHANGE_EMAIL_CONFIRMATION_TEMPLATE_ID"], + "name": "Confirm new email address", + "template_type": "email", + "content": """[[en]]\r\nHi ((name)),\r\n\r\nClick this link to confirm the new email address for your GC Notify account:\r\n((url))\r\n \r\nIf you did not try to change your email address, [contact us](\r\n((feedback_url)) ""contact us"").\r\n[[/en]]\r\n___\r\n\r\n[[fr]]\r\nBonjour ((name)),\r\n\r\nCliquez sur ce lien pour confirmer la nouvelle adresse courriel de voter compete Notification GC :\r\n((url))\r\n \r\nSi vous n''avez pas essayé de changer votre adresse courriel, [communiquez avec nous](\r\n((feedback_url)) ""communiquez avec nous"").\r\n[[/fr]]""", + "subject": "Confirm new email address | Confirmer votre nouvelle adresse courriel", + "process_type": "priority", + }, + { + "id": current_app.config["NEW_USER_EMAIL_VERIFICATION_TEMPLATE_ID"], + "name": "Notify email verification code", + "template_type": "email", + "content": """[[en]]\r\nHi ((name)),\r\n\r\nTo complete your registration for GC Notify, please click the link:\r\n((url))\r\n[[/en]]\r\n\r\n___\r\n\r\n[[fr]]\r\nBonjour ((name)),\r\n\r\nPour compléter votre inscription à Notification GC, veuillez cliquer sur le lien :\r\n((url))\r\n[[/fr]]""", + "subject": "Confirm your registration | Confirmer votre inscription", + "process_type": "priority", + }, + { + "id": current_app.config["REACHED_DAILY_LIMIT_TEMPLATE_ID"], + "name": "Combined daily limit reached", + "template_type": "email", + "content": """Hello ((name)),\r\n\r\n((service_name)) has reached its daily limit of ((message_limit_en)) messages. Your service has been blocked from sending messages until tomorrow.\r\n\r\nYou can request a limit increase by [contacting us](((contact_url))).\r\n\r\nThe GC Notify team\r\n\r\n___\r\n\r\nBonjour ((name)),\r\n\r\n((service_name)) vient d’atteindre sa limite quotidienne de ((message_limit_fr)) messages. 
Votre service ne peut plus envoyer de messages jusqu’à demain.\r\n\r\nVous pouvez demander à augmenter cette limite en [nous contactant](((contact_url))).\r\n\r\nL’équipe Notification GC""", + "subject": "Action required: Daily sending limit reached for ((service_name)) | Action requise: Limite d’envoi quotidienne atteinte pour ((service_name)) )", + "process_type": "normal", + }, +] + + +def upgrade(): + conn = op.get_bind() + + for template in templates: + current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone() + template["version"] = current_version[0] + 1 + + template_update = """ + UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at = '{}' + WHERE id = '{}' + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false) + """ + + for template in templates: + op.execute( + template_update.format( + template["content"], + template["subject"], + template["version"], + datetime.utcnow(), + template["id"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["version"], + template["process_type"], + ) + ) + + +def downgrade(): + pass diff --git a/migrations/versions/0426_add_bounce_type_columns.py b/migrations/versions/0426_add_bounce_type_columns.py new file mode 100644 index 0000000000..d7361230af --- /dev/null +++ b/migrations/versions/0426_add_bounce_type_columns.py @@ -0,0 +1,69 @@ +"""empty message + +Revision ID: 0426_add_bounce_type_columns +Revises: 0425_update_system_templates +Create Date: 2017-04-25 11:34:43.229494 + +""" + +# revision identifiers, used by Alembic. 
+revision = "0426_add_bounce_type_columns" +down_revision = "0425_update_system_templates" + +import sqlalchemy as sa +from alembic import op + + +# option 1 +def upgrade(): + # 1 - add feedback types to notifications/notifications_history table + feedback_types = sa.Enum("hard-bounce", "soft-bounce", name="notification_feedback_types") + feedback_types.create(op.get_bind()) + op.add_column("notifications", sa.Column("feedback_type", feedback_types, nullable=True)) + op.add_column("notification_history", sa.Column("feedback_type", feedback_types, nullable=True)) + + # 2 - add feedback sub types to notifications/notifications_history table + feedback_subtypes = sa.Enum( + "general", + "no-email", + "suppressed", + "on-account-suppression-list", + "mailbox-full", + "message-too-large", + "content-rejected", + "attachment-rejected", + name="notification_feedback_subtypes", + ) + feedback_subtypes.create(op.get_bind()) + op.add_column("notifications", sa.Column("feedback_subtype", feedback_subtypes, nullable=True)) + op.add_column("notification_history", sa.Column("feedback_subtype", feedback_subtypes, nullable=True)) + + # 3 - add ses_feedback_id to notifications/notifications_history table + op.add_column("notifications", sa.Column("ses_feedback_id", sa.String(), nullable=True)) + op.add_column("notification_history", sa.Column("ses_feedback_id", sa.String(), nullable=True)) + + # 4 - add ses_feedback_date to notifications/notifications_history table + op.add_column("notifications", sa.Column("ses_feedback_date", sa.DateTime(), nullable=True)) + op.add_column("notification_history", sa.Column("ses_feedback_date", sa.DateTime(), nullable=True)) + + +def downgrade(): + # 1 - drop feedback_type from notifications/notification_history table + op.drop_column("notifications", "feedback_type") + op.drop_column("notification_history", "feedback_type") + op.get_bind() + op.execute("DROP TYPE notification_feedback_types") + + # 2 - drop feedback_subtype from notifications/notification_history table + op.drop_column("notifications", "feedback_subtype") + op.drop_column("notification_history", "feedback_subtype") + op.get_bind() + op.execute("DROP TYPE notification_feedback_subtypes") + + # 3 - drop ses_feedback_id from notifications/notification_history table + op.drop_column("notifications", "ses_feedback_id") + op.drop_column("notification_history", "ses_feedback_id") + + # 4 - drop ses_feedback_date from notifications/notification_history table + op.drop_column("notifications", "ses_feedback_date") + op.drop_column("notification_history", "ses_feedback_date") diff --git a/migrations/versions/0427_add_bounce_type_indices.py b/migrations/versions/0427_add_bounce_type_indices.py new file mode 100644 index 0000000000..282cd5ee57 --- /dev/null +++ b/migrations/versions/0427_add_bounce_type_indices.py @@ -0,0 +1,27 @@ +"""empty message + +Revision ID: 0427_add_bounce_type_indices +Revises: 0426_add_bounce_type_columns +Create Date: 2017-04-25 11:34:43.229494 + +""" + +# revision identifiers, used by Alembic. 
+revision = "0427_add_bounce_type_indices" +down_revision = "0426_add_bounce_type_columns" + +from alembic import op + + +# option 1 +def upgrade(): + op.execute("COMMIT") + op.create_index(op.f("ix_notifications_feedback_type"), "notifications", ["feedback_type"], postgresql_concurrently=True) + op.create_index( + op.f("ix_notification_history_feedback_type"), "notification_history", ["feedback_type"], postgresql_concurrently=True + ) + + +def downgrade(): + op.drop_index(op.f("ix_notifications_feedback_type"), table_name="notifications") + op.drop_index(op.f("ix_notification_history_feedback_type"), table_name="notification_history") diff --git a/migrations/versions/0428_add_bounce_type_known.py b/migrations/versions/0428_add_bounce_type_known.py new file mode 100644 index 0000000000..df59f331a6 --- /dev/null +++ b/migrations/versions/0428_add_bounce_type_known.py @@ -0,0 +1,38 @@ +"""empty message + +Revision ID: 0428_add_bounce_type_known +Revises: 0427_add_bounce_type_indices +Create Date: 2017-04-25 11:34:43.229494 + +""" + +# revision identifiers, used by Alembic. +revision = "0428_add_bounce_type_known" +down_revision = "0427_add_bounce_type_indices" + +from alembic import op + + +# option 1 +def upgrade(): + # prevent from being executed in a transaction block + op.execute("COMMIT") + + op.execute("ALTER TYPE notification_feedback_types ADD VALUE 'unknown-bounce'") + op.execute("ALTER TYPE notification_feedback_subtypes ADD VALUE 'unknown-bounce-subtype'") + + +def downgrade(): + sql = f"""DELETE FROM pg_enum + WHERE enumlabel = 'unknown-bounce' + AND enumtypid = ( + SELECT oid FROM pg_type WHERE typname = 'notification_feedback_types' + )""" + op.execute(sql) + + sql = f"""DELETE FROM pg_enum + WHERE enumlabel = 'unknown-bounce-subtype' + AND enumtypid = ( + SELECT oid FROM pg_type WHERE typname = 'notification_feedback_subtypes' + )""" + op.execute(sql) diff --git a/migrations/versions/0429_add_organisation_notes.py b/migrations/versions/0429_add_organisation_notes.py new file mode 100644 index 0000000000..23122a6356 --- /dev/null +++ b/migrations/versions/0429_add_organisation_notes.py @@ -0,0 +1,32 @@ +""" + +Revision ID: 0429_add_organisation_notes +Revises: 0428_add_bounce_type_known +Create Date: 2023-03-09 16:02:27.798584 + +""" +import sqlalchemy as sa +from alembic import op + +revision = "0429_add_organisation_notes" +down_revision = "0428_add_bounce_type_known" + +user = "postgres" +timeout = 1200 # in seconds, i.e. 
20 minutes +default = 1 + + +def upgrade(): + op.add_column( + "services", + sa.Column("organisation_notes", sa.String(), nullable=True), + ) + op.add_column( + "services_history", + sa.Column("organisation_notes", sa.String(), nullable=True), + ) + + +def downgrade(): + op.drop_column("services", "organisation_notes") + op.drop_column("services_history", "organisation_notes") diff --git a/migrations/versions/0430_add_contact_form_direct_email.py b/migrations/versions/0430_add_contact_form_direct_email.py new file mode 100644 index 0000000000..0257acab15 --- /dev/null +++ b/migrations/versions/0430_add_contact_form_direct_email.py @@ -0,0 +1,90 @@ +""" + +Revision ID: 0430_add_contact_form_email +Revises: 0429_add_organisation_notes +Create Date: 2023-03-28 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0430_add_contact_form_email" +down_revision = "0429_add_organisation_notes" + +contact_us_template_id = current_app.config["CONTACT_FORM_DIRECT_EMAIL_TEMPLATE_ID"] +template_ids = [contact_us_template_id] + + +def upgrade(): + template_insert = """ + INSERT INTO templates (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', false) + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', false) + """ + + contact_us_content = "\n".join( + [ + "Freshdesk integration has failed. Sending the following contact us form:", + "((contact_us_content))", + "", + "___", + "", + "L’intégration de Freshdesk a échoué. 
Envoi du formulaire de contact suivant :", + "", + "((contact_us_content))", + ] + ) + + templates = [ + { + "id": contact_us_template_id, + "name": "Contact form direct email", + "subject": "Notify Contact us form", + "content": contact_us_content, + }, + ] + + for template in templates: + op.execute( + template_insert.format( + template["id"], + template["name"], + "email", + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + "priority", + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + "email", + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + "priority", + ) + ) + + +def downgrade(): + for template_id in template_ids: + op.execute("DELETE FROM notifications WHERE template_id = '{}'".format(template_id)) + op.execute("DELETE FROM notification_history WHERE template_id = '{}'".format(template_id)) + op.execute("DELETE FROM template_redacted WHERE template_id = '{}'".format(template_id)) + op.execute("DELETE FROM templates_history WHERE id = '{}'".format(template_id)) + op.execute("DELETE FROM templates WHERE id = '{}'".format(template_id)) diff --git a/migrations/versions/0431_add_pt_organisation_type.py b/migrations/versions/0431_add_pt_organisation_type.py new file mode 100644 index 0000000000..5efe1cffa4 --- /dev/null +++ b/migrations/versions/0431_add_pt_organisation_type.py @@ -0,0 +1,25 @@ +""" + +Revision ID: 0431_add_pt_organisation_type +Revises: 0430_add_contact_form_email +Create Date: 2023-05-30 00:00:00 + +""" + +from alembic import op + +revision = "0431_add_pt_organisation_type" +down_revision = "0430_add_contact_form_email" + + +def upgrade(): + op.execute( + f""" + INSERT INTO organisation_types (name, is_crown, annual_free_sms_fragment_limit) + VALUES ('province_or_territory', null, 250000) + """ + ) + + +def downgrade(): + op.execute("DELETE FROM organisation_types WHERE name = 'province_or_territory'") diff --git a/migrations/versions/0432_daily_email_limit_templates.py b/migrations/versions/0432_daily_email_limit_templates.py new file mode 100644 index 0000000000..0be21409a1 --- /dev/null +++ b/migrations/versions/0432_daily_email_limit_templates.py @@ -0,0 +1,154 @@ +""" + +Revision ID: 0423_daily_email_limit_updated +Revises: 0422_add_billable_units +Create Date: 2022-09-21 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0432_daily_email_limit_templates" +down_revision = "0431_add_pt_organisation_type" + +near_email_limit_template_id = current_app.config["NEAR_DAILY_EMAIL_LIMIT_TEMPLATE_ID"] +at_email_limit_template_id = current_app.config["REACHED_DAILY_EMAIL_LIMIT_TEMPLATE_ID"] +daily_email_limit_updated_id = current_app.config["DAILY_EMAIL_LIMIT_UPDATED_TEMPLATE_ID"] + +template_ids = [near_email_limit_template_id, at_email_limit_template_id, daily_email_limit_updated_id] + + +def upgrade(): + template_insert = """ + INSERT INTO templates (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', false) + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES 
('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', false) + """ + + near_email_limit_content = "\n".join( + [ + "Hello ((name)),", + "", + "((service_name)) just reached 80% of its daily email limit of ((message_limit_en)) messages. Your service will be blocked from sending email if you go above the daily limit by the end of the day.", + "", + "You can request a limit increase by [contacting us](((contact_url))).", + "", + "The GC Notify team", + "", + "___", + "", + "Bonjour ((name)),", + "", + "((service_name)) vient d’atteindre 80% de sa limite quotidienne de ((message_limit_fr)) courriels. Votre service ne pourra plus envoyer de courriels si vous allez au-delà de votre limite d’ici la fin de journée.", + "", + "Vous pouvez demander à augmenter cette limite en [nous contactant](((contact_url))).", + "", + "L’équipe GC Notification", + ] + ) + + reached_email_limit_content = "\n".join( + [ + "Hello ((name)),", + "", + "((service_name)) has reached its daily email limit of ((message_limit_en)) messages. Your service has been blocked from sending email messages until tomorrow.", + "", + "You can request a limit increase by [contacting us](((contact_url))).", + "", + "The GC Notify team", + "", + "___", + "", + "Bonjour ((name)),", + "", + "((service_name)) vient d’atteindre sa limite quotidienne de ((message_limit_fr)) courriels. Votre service ne peut plus envoyer de courriels jusqu’à demain.", + "", + "Vous pouvez demander à augmenter cette limite en [nous contactant](((contact_url))).", + "", + "L’équipe GC Notification", + ] + ) + + daily_email_limit_updated_content = "\n".join( + [ + "Hello ((name)),", + "", + "The daily email limit of ((service_name)) has just been updated. You can now send ((message_limit_en)) emails per day. This new limit is effective now.", + "", + "The GC Notify team", + "", + "___", + "", + "Bonjour ((name)),", + "", + "La limite quotidienne de courriels de ((service_name)) a été mise à jour. Vous pouvez désormais envoyer ((message_limit_fr)) courriels par jour. 
Ce changement est effectif dès maintenant.", + "", + "L’équipe GC Notification", + ] + ) + + templates = [ + { + "id": near_email_limit_template_id, + "name": "Near daily EMAIL limit", + "subject": "Action required: 80% of Daily email sending limit reached for ((service_name)) | Action requise: 80% de la limite d’envoi de courriels quotidienne atteinte pour ((service_name))", + "content": near_email_limit_content, + }, + { + "id": at_email_limit_template_id, + "name": "Daily EMAIL limit reached", + "subject": "Action required: Daily email sending limit reached for ((service_name)) | Action requise: Limite d’envoi de courriels quotidienne atteinte pour ((service_name))", + "content": reached_email_limit_content, + }, + { + "id": daily_email_limit_updated_id, + "name": "Daily EMAIL limit updated", + "subject": "Daily email sending limit updated for ((service_name)) | Limite d’envoi de courriels quotidienne mise à jour pour ((service_name))", + "content": daily_email_limit_updated_content, + }, + ] + + for template in templates: + op.execute( + template_insert.format( + template["id"], + template["name"], + "email", + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + "normal", + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + "email", + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + "normal", + ) + ) + + +def downgrade(): + for template_id in template_ids: + op.execute("DELETE FROM notifications WHERE template_id = '{}'".format(template_id)) + op.execute("DELETE FROM notification_history WHERE template_id = '{}'".format(template_id)) + op.execute("DELETE FROM template_redacted WHERE template_id = '{}'".format(template_id)) + op.execute("DELETE FROM templates_history WHERE id = '{}'".format(template_id)) + op.execute("DELETE FROM templates WHERE id = '{}'".format(template_id)) diff --git a/migrations/versions/0433_update_email_templates.py b/migrations/versions/0433_update_email_templates.py new file mode 100644 index 0000000000..cf37b12a41 --- /dev/null +++ b/migrations/versions/0433_update_email_templates.py @@ -0,0 +1,164 @@ +""" + +Revision ID: 0433_update_email_templates +Revises: 0432_daily_email_limit_templates +Create Date: 2023-08-08 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0433_update_email_templates" +down_revision = "0432_daily_email_limit_templates" + +near_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "((service_name)) can send ((message_limit_en)) emails per day. You’ll be blocked from sending if you exceed that limit before 7pm Eastern Time. Check [your current local time](https://nrc.canada.ca/en/web-clock/).", + "", + "To request a limit increase, [contact us](https://notification.canada.ca/contact). We’ll respond within 1 business day.", + "", + "The GC Notify team", + "", + "---", + "", + "Bonjour ((name)),", + "", + "La limite quotidienne d’envoi est de ((message_limit_fr)) courriels par jour pour ((service_name)). 
Si vous dépassez cette limite avant 19 heures, heure de l’Est, vos envois seront bloqués.", + "", + "Comparez [les heures officielles au Canada](https://nrc.canada.ca/fr/horloge-web/).", + "", + "Veuillez [nous contacter](https://notification.canada.ca/contact) si vous souhaitez augmenter votre limite d’envoi. Nous vous répondrons en un jour ouvrable.", + "", + "L’équipe Notification GC", + ] +) + + +reached_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "((service_name)) has sent ((message_limit_en)) emails today.", + "", + "You can send more messages after 7pm Eastern Time. Compare [official times across Canada](https://nrc.canada.ca/en/web-clock/).", + "", + "To request a limit increase, [contact us](https://notification.canada.ca/contact). We’ll respond within 1 business day.", + "", + "The GC Notify team", + "", + "---", + "", + "Bonjour ((name)),", + "", + "Aujourd’hui, ((message_limit_fr)) courriels ont été envoyés pour ((service_name)).", + "", + "Vous pourrez envoyer davantage de courriels après 19 heures, heure de l’Est. Comparez [les heures officielles au Canada](https://nrc.canada.ca/fr/horloge-web/).", + "", + "Veuillez [nous contacter](https://notification.canada.ca/contact) si vous désirez augmenter votre limite d’envoi. Nous vous répondrons en un jour ouvrable.", + "", + "L’équipe Notification GC", + ] +) + +updated_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "You can now send ((message_limit_en)) email messages per day.", + "", + "The GC Notify Team", + "", + "---", + "", + "Bonjour ((name)),", + "", + "Vous pouvez désormais envoyer ((message_limit_fr)) courriels par jour.", + "", + "L’équipe Notification GC", + ] +) + +templates = [ + { + "id": current_app.config["NEAR_DAILY_EMAIL_LIMIT_TEMPLATE_ID"], + "name": "Near daily EMAIL limit", + "template_type": "email", + "content": near_content, + "subject": "((service_name)) is near its daily limit for emails. 
| La limite quotidienne d’envoi de courriels est presque atteinte pour ((service_name)).", + "process_type": "priority", + }, + { + "id": current_app.config["REACHED_DAILY_EMAIL_LIMIT_TEMPLATE_ID"], + "name": "Daily EMAIL limit reached", + "template_type": "email", + "content": reached_content, + "subject": "((service_name)) has reached its daily limit for email messages | La limite quotidienne d’envoi de courriels atteinte pour ((service_name)).", + "process_type": "priority", + }, + { + "id": current_app.config["DAILY_EMAIL_LIMIT_UPDATED_TEMPLATE_ID"], + "name": "Daily EMAIL limit updated", + "template_type": "email", + "content": updated_content, + "subject": "We’ve updated the daily email limit for ((service_name)) | Nous avons mis à jour la limite quotidienne de courriels pour ((service_name))", + "process_type": "priority", + }, +] + + +def upgrade(): + conn = op.get_bind() + + for template in templates: + current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone() + template["version"] = current_version[0] + 1 + + template_update = """ + UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at = '{}' + WHERE id = '{}' + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false) + """ + + for template in templates: + op.execute( + template_update.format( + template["content"], + template["subject"], + template["version"], + datetime.utcnow(), + template["id"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["version"], + template["process_type"], + ) + ) + + +def downgrade(): + pass diff --git a/migrations/versions/0434_update_email_templates_sms.py b/migrations/versions/0434_update_email_templates_sms.py new file mode 100644 index 0000000000..aaff5b3ea1 --- /dev/null +++ b/migrations/versions/0434_update_email_templates_sms.py @@ -0,0 +1,161 @@ +""" + +Revision ID: 0434_update_email_templates_sms +Revises: 0433_update_email_templates +Create Date: 2023-08-09 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0434_update_email_templates_sms" +down_revision = "0433_update_email_templates" + +near_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "((service_name)) can send ((message_limit_en)) text messages per day. You’ll be blocked from sending if you exceed that limit before ((limit_reset_time_et_12hr)) Eastern Time. Compare [official times across Canada](https://nrc.canada.ca/en/web-clock/).", + "", + "To request a limit increase, [contact us](((contact_url))). We’ll respond within 1 business day.", + "", + "The GC Notify team", + "", + "---", + "", + "Bonjour ((name)),", + "", + "((service_name)) peut envoyer ((message_limit_fr)) messages texte par jour. Si vous atteignez cette limite avant ((limit_reset_time_et_24hr)) heures, heure de l’Est, vos envois seront bloqués. Comparez [les heures officielles au Canada](https://nrc.canada.ca/fr/horloge-web/).", + "", + "Veuillez [nous contacter](((contact_url))) si vous désirez augmenter votre limite d’envoi. 
Nous vous répondrons en un jour ouvrable.", + "", + "L’équipe Notification GC", + ] +) + + +reached_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "((service_name)) has sent ((message_limit_en)) text messages today.", + "", + "You can send more messages after ((limit_reset_time_et_12hr)) Eastern Time. Compare [official times across Canada](https://nrc.canada.ca/en/web-clock/).", + "", + "To request a limit increase, [contact us](((contact_url))). We’ll respond within 1 business day.", + "", + "The GC Notify team", + "", + "---", + "", + "Bonjour ((name)),", + "", + "Aujourd’hui, ((message_limit_fr)) messages texte ont été envoyés pour ((service_name)).", + "", + "Vous pourrez envoyer davantage de messages texte après ((limit_reset_time_et_24hr)) heures, heure de l’Est. Comparez [les heures officielles au Canada](https://nrc.canada.ca/fr/horloge-web/).", + "", + "Veuillez [nous contacter](((contact_url))) si vous désirez augmenter votre limite d’envoi. Nous vous répondrons en un jour ouvrable.", + "", + "L’équipe Notification GC", + ] +) + +updated_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "You can now send ((message_limit_en)) text messages per day.", + "", + "The GC Notify team", + "", + "---", + "", + "Bonjour ((name)),", + "", + "Vous pouvez désormais envoyer ((message_limit_fr)) messages texte par jour.", + "", + "L’équipe Notification GC", + ] +) + +templates = [ + { + "id": current_app.config["NEAR_DAILY_SMS_LIMIT_TEMPLATE_ID"], + "template_type": "email", + "subject": "((service_name)) is near its daily limit for text messages. | La limite quotidienne d’envoi de messages texte est presque atteinte pour ((service_name)).", + "content": near_content, + "process_type": "priority", + }, + { + "id": current_app.config["REACHED_DAILY_SMS_LIMIT_TEMPLATE_ID"], + "template_type": "email", + "subject": "((service_name)) has reached its daily limit for text messages. 
| La limite quotidienne d’envoi de messages texte est atteinte pour ((service_name)).", + "content": reached_content, + "process_type": "priority", + }, + { + "id": current_app.config["DAILY_SMS_LIMIT_UPDATED_TEMPLATE_ID"], + "template_type": "email", + "subject": "We’ve updated the daily limit for ((service_name)) | Limite quotidienne d’envoi mise à jour pour ((service_name)).", + "content": updated_content, + "process_type": "priority", + }, +] + + +def upgrade(): + conn = op.get_bind() + + for template in templates: + current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone() + name = conn.execute("select name from templates where id='{}'".format(template["id"])).fetchone() + template["version"] = current_version[0] + 1 + template["name"] = name[0] + + template_update = """ + UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at = '{}' + WHERE id = '{}' + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false) + """ + + for template in templates: + op.execute( + template_update.format( + template["content"], + template["subject"], + template["version"], + datetime.utcnow(), + template["id"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["version"], + template["process_type"], + ) + ) + + +def downgrade(): + pass diff --git a/migrations/versions/0435_update_email_templates_2.py b/migrations/versions/0435_update_email_templates_2.py new file mode 100644 index 0000000000..70ba262bfd --- /dev/null +++ b/migrations/versions/0435_update_email_templates_2.py @@ -0,0 +1,136 @@ +""" + +Revision ID: 0435_update_email_templates_2 +Revises: 0434_update_email_templates_sms +Create Date: 2023-08-08 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0435_update_email_templates_2" +down_revision = "0434_update_email_templates_sms" + +near_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "((service_name)) can send ((message_limit_en)) emails per day. You’ll be blocked from sending if you exceed that limit before ((limit_reset_time_et_12hr)) Eastern Time. Check [your current local time](https://nrc.canada.ca/en/web-clock/).", + "", + "To request a limit increase, [contact us](https://notification.canada.ca/contact). We’ll respond within 1 business day.", + "", + "The GC Notify team", + "", + "---", + "", + "Bonjour ((name)),", + "", + "La limite quotidienne d’envoi est de ((message_limit_fr)) courriels par jour pour ((service_name)). Si vous dépassez cette limite avant ((limit_reset_time_et_24hr)) heures, heure de l’Est, vos envois seront bloqués.", + "", + "Comparez [les heures officielles au Canada](https://nrc.canada.ca/fr/horloge-web/).", + "", + "Veuillez [nous contacter](https://notification.canada.ca/contact) si vous souhaitez augmenter votre limite d’envoi. 
Nous vous répondrons en un jour ouvrable.", + "", + "L’équipe Notification GC", + ] +) + + +reached_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "((service_name)) has sent ((message_limit_en)) emails today.", + "", + "You can send more messages after ((limit_reset_time_et_12hr)) Eastern Time. Compare [official times across Canada](https://nrc.canada.ca/en/web-clock/).", + "", + "To request a limit increase, [contact us](https://notification.canada.ca/contact). We’ll respond within 1 business day.", + "", + "The GC Notify team", + "", + "---", + "", + "Bonjour ((name)),", + "", + "Aujourd’hui, ((message_limit_fr)) courriels ont été envoyés pour ((service_name)).", + "", + "Vous pourrez envoyer davantage de courriels après ((limit_reset_time_et_24hr)) heures, heure de l’Est. Comparez [les heures officielles au Canada](https://nrc.canada.ca/fr/horloge-web/).", + "", + "Veuillez [nous contacter](https://notification.canada.ca/contact) si vous désirez augmenter votre limite d’envoi. Nous vous répondrons en un jour ouvrable.", + "", + "L’équipe Notification GC", + ] +) + +templates = [ + { + "id": current_app.config["NEAR_DAILY_EMAIL_LIMIT_TEMPLATE_ID"], + "name": "Near daily EMAIL limit", + "template_type": "email", + "content": near_content, + "subject": "((service_name)) is near its daily limit for emails. | La limite quotidienne d’envoi de courriels est presque atteinte pour ((service_name)).", + "process_type": "priority", + }, + { + "id": current_app.config["REACHED_DAILY_EMAIL_LIMIT_TEMPLATE_ID"], + "name": "Daily EMAIL limit reached", + "template_type": "email", + "content": reached_content, + "subject": "((service_name)) has reached its daily limit for email messages | La limite quotidienne d’envoi de courriels atteinte pour ((service_name)).", + "process_type": "priority", + }, +] + + +def upgrade(): + conn = op.get_bind() + + for template in templates: + current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone() + template["version"] = current_version[0] + 1 + + template_update = """ + UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at = '{}' + WHERE id = '{}' + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false) + """ + + for template in templates: + op.execute( + template_update.format( + template["content"], + template["subject"], + template["version"], + datetime.utcnow(), + template["id"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["version"], + template["process_type"], + ) + ) + + +def downgrade(): + pass diff --git a/migrations/versions/0436_add_columns_api_keys.py b/migrations/versions/0436_add_columns_api_keys.py new file mode 100644 index 0000000000..44eb063048 --- /dev/null +++ b/migrations/versions/0436_add_columns_api_keys.py @@ -0,0 +1,26 @@ +""" + +Revision ID: 0436_add_columns_api_keys +Revises: 0435_update_email_templates_2.py +Create Date: 2023-09-01 + +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects.postgresql import JSONB + +revision = 
"0436_add_columns_api_keys" +down_revision = "0435_update_email_templates_2" + +user = "postgres" +timeout = 60 # in seconds, i.e. 1 minute + + +def upgrade(): + op.add_column("api_keys", sa.Column("compromised_key_info", JSONB, nullable=True)) + op.add_column("api_keys_history", sa.Column("compromised_key_info", JSONB, nullable=True)) + + +def downgrade(): + op.drop_column("api_keys", "compromised_key_info") + op.drop_column("api_keys_history", "compromised_key_info") diff --git a/migrations/versions/0437_email_templates_msgs_left.py b/migrations/versions/0437_email_templates_msgs_left.py new file mode 100644 index 0000000000..a9543a0b52 --- /dev/null +++ b/migrations/versions/0437_email_templates_msgs_left.py @@ -0,0 +1,101 @@ +""" + +Revision ID: 0437_email_templates_msgs_left +Revises: 0436_add_columns_api_keys +Create Date: 2023-10-05 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0437_email_templates_msgs_left" +down_revision = "0436_add_columns_api_keys" + +near_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "((service_name)) has sent ((count_en)) out of its limit of ((message_limit_en)) emails per 24 hours.", + "", + "**((service_name)) can send ((remaining_en)) more messages until your limit resets at ((limit_reset_time_et_12hr)) Eastern Time.** Compare official times across Canada](https://nrc.canada.ca/en/web-clock/).", + "", + "To request a limit increase, [contact us](((contact_url))). We’ll respond within 1 business day.", + "", + "The GC Notify team", + "", + "---", + "", + "Bonjour ((name)),", + "", + "((service_name)) a envoyé ((count_fr)) courriels de sa limite de ((message_limit_fr)) courriels par 24 heures.", + "", + "**((service_name)) peut encore envoyer ((remaining_fr)) messages d’ici à ce que votre limite de courriels soit réinitialisée à ((limit_reset_time_et_24hr)), heure de l’Est.** Comparez les heures officielles à travers le Canada.", + "", + "Pour demander une augmentation de votre limite, [veuillez nous joindre](((contact_url))). Nous vous répondrons en un jour ouvrable.", + "", + "L’équipe Notification GC", + ] +) + +templates = [ + { + "id": current_app.config["NEAR_DAILY_EMAIL_LIMIT_TEMPLATE_ID"], + "name": "Near daily EMAIL limit", + "template_type": "email", + "content": near_content, + "subject": "((service_name)) is near its daily limit for emails. 
| La limite quotidienne d’envoi de courriels est presque atteinte pour ((service_name)).", + "process_type": "priority", + }, +] + + +def upgrade(): + conn = op.get_bind() + + for template in templates: + current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone() + template["version"] = current_version[0] + 1 + + template_update = """ + UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at = '{}' + WHERE id = '{}' + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false) + """ + + for template in templates: + op.execute( + template_update.format( + template["content"], + template["subject"], + template["version"], + datetime.utcnow(), + template["id"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["version"], + template["process_type"], + ) + ) + + +def downgrade(): + pass diff --git a/migrations/versions/0438_sms_templates_msgs_left.py b/migrations/versions/0438_sms_templates_msgs_left.py new file mode 100644 index 0000000000..131352bd70 --- /dev/null +++ b/migrations/versions/0438_sms_templates_msgs_left.py @@ -0,0 +1,102 @@ +""" + +Revision ID: 0438_sms_templates_msgs_left +Revises: 0437_email_templates_msgs_left +Create Date: 2023-10-05 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0438_sms_templates_msgs_left" +down_revision = "0437_email_templates_msgs_left" + +near_content = "\n".join( + [ + "(la version française suit)", + "", + "Hello ((name)),", + "", + "((service_name)) has sent ((count_en)) out of its limit of ((message_limit_en)) text messages per 24 hours.", + "", + "**((service_name)) can send ((remaining_en)) more messages until your limit resets at ((limit_reset_time_et_12hr)) Eastern Time.** Compare [official times across Canada](https://nrc.canada.ca/en/web-clock/).", + "", + "To request a limit increase, [contact us](((contact_url))). We’ll respond within 1 business day.", + "", + "The GC Notify team", + "---", + "", + "Bonjour ((name)),", + "", + "((service_name)) a envoyé ((count_fr)) messages de sa limite de ((message_limit_fr)) messages texte par 24 heures.", + "", + "**((service_name)) peut encore envoyer ((remaining_fr)) messages d’ici à ce que votre limite de messages texte soit réinitialisée à ((limit_reset_time_et_24hr)), heure de l’Est.** Comparez les [heures officielles à travers le Canada](https://nrc.canada.ca/en/web-clock/).", + "", + "Pour demander une augmentation de votre limite, [veuillez nous joindre](((contact_url))). Nous vous répondrons en un jour ouvrable.", + "", + "L’équipe Notification GC", + ] +) + + +templates = [ + { + "id": current_app.config["NEAR_DAILY_SMS_LIMIT_TEMPLATE_ID"], + "template_type": "email", + "subject": "((service_name)) is near its daily limit for text messages. 
| La limite quotidienne d’envoi de messages texte est presque atteinte pour ((service_name)).", + "content": near_content, + "process_type": "priority", + }, +] + + +def upgrade(): + conn = op.get_bind() + + for template in templates: + current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone() + name = conn.execute("select name from templates where id='{}'".format(template["id"])).fetchone() + template["version"] = current_version[0] + 1 + template["name"] = name[0] + + template_update = """ + UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at = '{}' + WHERE id = '{}' + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false) + """ + + for template in templates: + op.execute( + template_update.format( + template["content"], + template["subject"], + template["version"], + datetime.utcnow(), + template["id"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["version"], + template["process_type"], + ) + ) + + +def downgrade(): + pass diff --git a/migrations/versions/0439_add_index_n_history.py b/migrations/versions/0439_add_index_n_history.py new file mode 100644 index 0000000000..b59a4ed8b4 --- /dev/null +++ b/migrations/versions/0439_add_index_n_history.py @@ -0,0 +1,34 @@ +""" + +Revision ID: 0439_add_index_n_history +Revises: 0438_sms_templates_msgs_left +Create Date: 2023-10-05 00:00:00 + +""" +from datetime import datetime + +from alembic import op + +revision = "0439_add_index_n_history" +down_revision = "0438_sms_templates_msgs_left" + + +def index_exists(name): + connection = op.get_bind() + result = connection.execute( + "SELECT exists(SELECT 1 from pg_indexes where indexname = '{}') as ix_exists;".format(name) + ).first() + return result.ix_exists + + +# option 1 +def upgrade(): + op.execute("COMMIT") + if not index_exists("ix_notification_history_created_by_id"): + op.create_index( + op.f("ix_notification_history_created_by_id"), "notification_history", ["created_by_id"], postgresql_concurrently=True + ) + + +def downgrade(): + op.drop_index(op.f("ix_notification_history_created_by_id"), table_name="notification_history") diff --git a/migrations/versions/0440_add_index_n_history_comp.py b/migrations/versions/0440_add_index_n_history_comp.py new file mode 100644 index 0000000000..4b65da8a39 --- /dev/null +++ b/migrations/versions/0440_add_index_n_history_comp.py @@ -0,0 +1,37 @@ +""" + +Revision ID: 0439_add_index_n_history +Revises: 0438_sms_templates_msgs_left +Create Date: 2023-10-05 00:00:00 + +""" +from datetime import datetime + +from alembic import op + +revision = "0440_add_index_n_history_2" +down_revision = "0439_add_index_n_history" + + +def index_exists(name): + connection = op.get_bind() + result = connection.execute( + "SELECT exists(SELECT 1 from pg_indexes where indexname = '{}') as ix_exists;".format(name) + ).first() + return result.ix_exists + + +# option 1 +def upgrade(): + op.execute("COMMIT") + if not index_exists("ix_notification_history_created_api_key_id"): + op.create_index( + op.f("ix_notification_history_created_api_key_id"), 
+ "notification_history", + ["created_at", "api_key_id"], + postgresql_concurrently=True, + ) + + +def downgrade(): + op.drop_index(op.f("ix_notification_history_created_api_key_id"), table_name="notification_history") diff --git a/migrations/versions/0441_add_apikey_revoke_email.py b/migrations/versions/0441_add_apikey_revoke_email.py new file mode 100644 index 0000000000..f5cd16f5e5 --- /dev/null +++ b/migrations/versions/0441_add_apikey_revoke_email.py @@ -0,0 +1,103 @@ +""" +Revision ID: 0441_add_apikey_revoke_email +Revises: 0440_add_index_n_history_comp +Create Date: 2022-09-21 00:00:00 +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0441_add_apikey_revoke_email" +down_revision = "0440_add_index_n_history_2" + +apikey_revoke_template_id = current_app.config["APIKEY_REVOKE_TEMPLATE_ID"] + + +def upgrade(): + template_insert = """ + INSERT INTO templates (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', false) + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', false) + """ + + apikey_revoke_limit_content = "\n".join( + [ + "[[fr]]", + "(La version française suit)", + "[[/fr]]", + "", + "[[en]]", + "Hello,", + "", + "We discovered that an API key for service **''((service_name))''** is publicly available. GC Notify detected the key at ((public_location)). To protect GC Notify’s security, we revoked **''((key_name))''**.", + "", + "If you have questions or concerns, contact us.", + "", + "The GC Notify team", + "[[/en]]", + "", + "---", + "", + "[[fr]]", + "Bonjour,", + "", + "Nous avons découvert qu’une clé API du service **''((service_name))''** était à la disposition du public. Notification GC a détecté la clé à l’adresse suivante : ((public_location)). 
Pour la sécurité de Notification GC, nous avons révoqué **''((key_name))''**.", + "", + "Pour toutes questions, contactez-nous.", + "", + "L’équipe Notification GC", + "[[/fr]]", + ] + ) + + templates = [ + { + "id": apikey_revoke_template_id, + "name": "API Key revoke EMAIL", + "subject": "We revoked your API key | Nous avons révoqué votre clé API", + "content": apikey_revoke_limit_content, + }, + ] + + for template in templates: + op.execute( + template_insert.format( + template["id"], + template["name"], + "email", + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + "normal", + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + "email", + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + "normal", + ) + ) + + +def downgrade(): + op.execute("DELETE FROM notifications WHERE template_id = '{}'".format(apikey_revoke_template_id)) + op.execute("DELETE FROM notification_history WHERE template_id = '{}'".format(apikey_revoke_template_id)) + op.execute("DELETE FROM template_redacted WHERE template_id = '{}'".format(apikey_revoke_template_id)) + op.execute("DELETE FROM templates_history WHERE id = '{}'".format(apikey_revoke_template_id)) + op.execute("DELETE FROM templates WHERE id = '{}'".format(apikey_revoke_template_id)) diff --git a/migrations/versions/0442_add_heartbeat_templates.py b/migrations/versions/0442_add_heartbeat_templates.py new file mode 100644 index 0000000000..4923cf9396 --- /dev/null +++ b/migrations/versions/0442_add_heartbeat_templates.py @@ -0,0 +1,115 @@ +""" +Revision ID: 0442_add_heartbeat_templates +Revises: 0441_add_apikey_revoke_email +Create Date: 2022-09-21 00:00:00 +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0442_add_heartbeat_templates" +down_revision = "0441_add_apikey_revoke_email" + +templates = [ + { + "id": current_app.config["HEARTBEAT_TEMPLATE_EMAIL_LOW"], + "name": "HEARTBEAT_TEMPLATE_EMAIL_LOW", + "template_type": "email", + "content": "HEARTBEAT_TEMPLATE_EMAIL_LOW", + "subject": "HEARTBEAT_TEMPLATE_EMAIL_LOW", + "process_type": "bulk", + }, + { + "id": current_app.config["HEARTBEAT_TEMPLATE_EMAIL_MEDIUM"], + "name": "HEARTBEAT_TEMPLATE_EMAIL_MEDIUM", + "template_type": "email", + "content": "HEARTBEAT_TEMPLATE_EMAIL_MEDIUM", + "subject": "HEARTBEAT_TEMPLATE_EMAIL_MEDIUM", + "process_type": "normal", + }, + { + "id": current_app.config["HEARTBEAT_TEMPLATE_EMAIL_HIGH"], + "name": "HEARTBEAT_TEMPLATE_EMAIL_HIGH", + "template_type": "email", + "content": "HEARTBEAT_TEMPLATE_EMAIL_HIGH", + "subject": "HEARTBEAT_TEMPLATE_EMAIL_HIGH", + "process_type": "priority", + }, + { + "id": current_app.config["HEARTBEAT_TEMPLATE_SMS_LOW"], + "name": "HEARTBEAT_TEMPLATE_SMS_LOW", + "template_type": "sms", + "content": "HEARTBEAT_TEMPLATE_SMS_LOW", + "subject": "HEARTBEAT_TEMPLATE_SMS_LOW", + "process_type": "bulk", + }, + { + "id": current_app.config["HEARTBEAT_TEMPLATE_SMS_MEDIUM"], + "name": "HEARTBEAT_TEMPLATE_SMS_MEDIUM", + "template_type": "sms", + "content": "HEARTBEAT_TEMPLATE_SMS_MEDIUM", + "subject": "HEARTBEAT_TEMPLATE_SMS_MEDIUM", + "process_type": "normal", + }, + { + "id": current_app.config["HEARTBEAT_TEMPLATE_SMS_HIGH"], + "name": "HEARTBEAT_TEMPLATE_SMS_HIGH", + "template_type": "sms", + "content": "HEARTBEAT_TEMPLATE_SMS_HIGH", + "subject": 
"HEARTBEAT_TEMPLATE_SMS_HIGH", + "process_type": "priority", + }, +] + + +def upgrade(): + template_insert = """ + INSERT INTO templates (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', False) + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', 1, '{}', False) + """ + + for template in templates: + op.execute( + template_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["process_type"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["process_type"], + ) + ) + + +def downgrade(): + TEMPLATE_IDS = ",".join(["'{}'".format(x["id"]) for x in templates]) + + op.execute("DELETE FROM notifications WHERE template_id in ({})".format(TEMPLATE_IDS)) + op.execute("DELETE FROM notification_history WHERE template_id in ({})".format(TEMPLATE_IDS)) + op.execute("DELETE FROM template_redacted WHERE template_id in ({})".format(TEMPLATE_IDS)) + op.execute("DELETE FROM templates_history WHERE id in ({})".format(TEMPLATE_IDS)) + op.execute("DELETE FROM templates WHERE id in ({})".format(TEMPLATE_IDS)) diff --git a/migrations/versions/0443_add_apikey_last_used_column.py b/migrations/versions/0443_add_apikey_last_used_column.py new file mode 100644 index 0000000000..23d532d983 --- /dev/null +++ b/migrations/versions/0443_add_apikey_last_used_column.py @@ -0,0 +1,22 @@ +""" +Revision ID: 0443_add_apikey_last_used_column +Revises: 0442_add_heartbeat_templates +Create Date: 2022-09-21 00:00:00 +""" +from datetime import datetime + +import sqlalchemy as sa +from alembic import op + +revision = "0443_add_apikey_last_used_column" +down_revision = "0442_add_heartbeat_templates" + + +def upgrade(): + op.add_column("api_keys", sa.Column("last_used_timestamp", sa.DateTime(), nullable=True)) + op.add_column("api_keys_history", sa.Column("last_used_timestamp", sa.DateTime(), nullable=True)) + + +def downgrade(): + op.drop_column("api_keys", "last_used_timestamp") + op.drop_column("api_keys_history", "last_used_timestamp") diff --git a/migrations/versions/0444_add_index_n_history2.py b/migrations/versions/0444_add_index_n_history2.py new file mode 100644 index 0000000000..2b3604d984 --- /dev/null +++ b/migrations/versions/0444_add_index_n_history2.py @@ -0,0 +1,40 @@ +""" + +Revision ID: 0439_add_index_n_history +Revises: 0438_sms_templates_msgs_left +Create Date: 2023-10-05 00:00:00 + +""" +from datetime import datetime + +from alembic import op + +revision = "0444_add_index_n_history2" +down_revision = "0443_add_apikey_last_used_column" + + +def index_exists(name): + connection = op.get_bind() + result = connection.execute( + "SELECT exists(SELECT 1 from pg_indexes where indexname = '{}') as ix_exists;".format(name) + ).first() + return result.ix_exists + + +# option 1 +def upgrade(): + op.execute("COMMIT") + if not 
index_exists("ix_notification_history_api_key_id_created"): + op.create_index( + op.f("ix_notification_history_api_key_id_created"), + "notification_history", + ["api_key_id", "created_at"], + postgresql_concurrently=True, + ) + + +def downgrade(): + op.execute("COMMIT") + op.drop_index( + op.f("ix_notification_history_api_key_id_created"), table_name="notification_history", postgresql_concurrently=True + ) diff --git a/migrations/versions/0445_add_org_id_branding.py b/migrations/versions/0445_add_org_id_branding.py new file mode 100644 index 0000000000..0504d5f492 --- /dev/null +++ b/migrations/versions/0445_add_org_id_branding.py @@ -0,0 +1,46 @@ +""" +Revision ID: 0445_add_org_id_branding +Revises: 0444_add_index_n_history2.py +Create Date: 2024-02-27 +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "0445_add_org_id_branding" +down_revision = "0444_add_index_n_history2" + + +def upgrade(): + op.add_column( + "email_branding", + sa.Column("organisation_id", postgresql.UUID(as_uuid=True), nullable=True), + ) + op.create_index( + op.f("ix_email_branding_organisation_id"), + "email_branding", + ["organisation_id"], + unique=False, + ) + op.create_foreign_key( + "fk_email_branding_organisation", + "email_branding", + "organisation", + ["organisation_id"], + ["id"], + ondelete="SET NULL", + ) + op.drop_constraint("fk_organisation_email_branding_id", "organisation", type_="foreignkey") + + +def downgrade(): + op.drop_index(op.f("ix_email_branding_organisation_id"), table_name="email_branding") + op.drop_constraint("fk_email_branding_organisation", "email_branding", type_="foreignkey") + op.drop_column("email_branding", "organisation_id") + op.create_foreign_key( + "fk_organisation_email_branding_id", + "organisation", + "email_branding", + ["email_branding_id"], + ["id"], + ) diff --git a/migrations/versions/0446_add_alt_text.py b/migrations/versions/0446_add_alt_text.py new file mode 100644 index 0000000000..868ce33db7 --- /dev/null +++ b/migrations/versions/0446_add_alt_text.py @@ -0,0 +1,34 @@ +""" +Revision ID: 0446_add_alt_text.py +Revises: 0445_add_org_id_branding.py +Create Date: 2024-04-23 +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy import text + +revision = "0446_add_alt_text" +down_revision = "0445_add_org_id_branding" + + +def upgrade(): + table_description = op.get_bind().execute( + text("SELECT * FROM information_schema.columns WHERE table_name = 'email_branding'") + ) + + # Check if the column exists + if "alt_text_en" not in [column["column_name"] for column in table_description]: + op.add_column( + "email_branding", + sa.Column("alt_text_en", sa.String(), nullable=True), + ) + if "alt_text_fr" not in [column["column_name"] for column in table_description]: + op.add_column( + "email_branding", + sa.Column("alt_text_fr", sa.String(), nullable=True), + ) + + +def downgrade(): + op.drop_column("email_branding", "alt_text_fr") + op.drop_column("email_branding", "alt_text_en") diff --git a/migrations/versions/0447_update_verify_code_template.py b/migrations/versions/0447_update_verify_code_template.py new file mode 100644 index 0000000000..9db7e8f1c8 --- /dev/null +++ b/migrations/versions/0447_update_verify_code_template.py @@ -0,0 +1,97 @@ +""" + +Revision ID: 0447_update_verify_code_template +Revises: 0446_add_alt_text +Create Date: 2023-10-05 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = 
"0447_update_verify_code_template" +down_revision = "0446_add_alt_text" + +near_content = "\n".join( + [ + "[[en]]", + "Hi ((name)),", + "", + "Here is your security code to log in to GC Notify:", + "", + "^ **((verify_code))**", + "[[/en]]", + "", + "---", + "", + "[[fr]]", + "Bonjour ((name)),", + "", + "Voici votre code de sécurité pour vous connecter à Notification GC:", + "", + "^ **((verify_code))**", + "[[/fr]]", + ] +) + + +templates = [ + { + "id": current_app.config["EMAIL_2FA_TEMPLATE_ID"], + "template_type": "email", + "subject": "Sign in | Connectez-vous", + "content": near_content, + "process_type": "priority", + }, +] + + +def upgrade(): + conn = op.get_bind() + + for template in templates: + current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone() + name = conn.execute("select name from templates where id='{}'".format(template["id"])).fetchone() + template["version"] = current_version[0] + 1 + template["name"] = name[0] + + template_update = """ + UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at = '{}' + WHERE id = '{}' + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false) + """ + + for template in templates: + op.execute( + template_update.format( + template["content"], + template["subject"], + template["version"], + datetime.utcnow(), + template["id"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["version"], + template["process_type"], + ) + ) + + +def downgrade(): + pass diff --git a/migrations/versions/0448_update_verify_code2.py b/migrations/versions/0448_update_verify_code2.py new file mode 100644 index 0000000000..39f3acee1b --- /dev/null +++ b/migrations/versions/0448_update_verify_code2.py @@ -0,0 +1,97 @@ +""" + +Revision ID: 0448_update_verify_code2 +Revises: 0447_update_verify_code_template +Create Date: 2023-10-05 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0448_update_verify_code2" +down_revision = "0447_update_verify_code_template" + +near_content = "\n".join( + [ + "[[en]]", + "Hi ((name)),", + "", + "Here is your security code to log in to GC Notify:", + "", + "^ ((verify_code))", + "[[/en]]", + "", + "---", + "", + "[[fr]]", + "Bonjour ((name)),", + "", + "Voici votre code de sécurité pour vous connecter à Notification GC:", + "", + "^ ((verify_code))", + "[[/fr]]", + ] +) + + +templates = [ + { + "id": current_app.config["EMAIL_2FA_TEMPLATE_ID"], + "template_type": "email", + "subject": "Sign in | Connectez-vous", + "content": near_content, + "process_type": "priority", + }, +] + + +def upgrade(): + conn = op.get_bind() + + for template in templates: + current_version = conn.execute("select version from templates where id='{}'".format(template["id"])).fetchone() + name = conn.execute("select name from templates where id='{}'".format(template["id"])).fetchone() + template["version"] = current_version[0] + 1 + template["name"] = name[0] + + template_update = """ + UPDATE templates SET content = '{}', subject = '{}', version = '{}', updated_at 
= '{}' + WHERE id = '{}' + """ + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false) + """ + + for template in templates: + op.execute( + template_update.format( + template["content"], + template["subject"], + template["version"], + datetime.utcnow(), + template["id"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + template["version"], + template["process_type"], + ) + ) + + +def downgrade(): + pass diff --git a/migrations/versions/0449_update_magic_link_auth.py b/migrations/versions/0449_update_magic_link_auth.py new file mode 100644 index 0000000000..6e29d5501c --- /dev/null +++ b/migrations/versions/0449_update_magic_link_auth.py @@ -0,0 +1,97 @@ +""" + +Revision ID: 0448_update_verify_code2 +Revises: 0449_update_magic_link_auth +Create Date: 2023-10-05 00:00:00 + +""" +from datetime import datetime + +from alembic import op +from flask import current_app + +revision = "0449_update_magic_link_auth" +down_revision = "0448_update_verify_code2" + +near_content = "\n".join( + [ + "[[en]]" + "Hi ((name))," + "" + "Here is your magic link to log in to GC Notify:" + "" + "^ **[Sign-in](((link_url_en)))**" + "[[/en]]" + "" + "---" + "" + "[[fr]]" + "Bonjour ((name))," + "" + "Voici votre lien magique pour vous connecter à Notification GC:" + "" + "^ **[Connectez-vous](((link_url_fr)))**" + "[[/fr]]" + ] +) + + +template = { + "id": current_app.config["EMAIL_MAGIC_LINK_TEMPLATE_ID"], + "template_type": "email", + "subject": "Sign in | Connectez-vous", + "content": near_content, + "process_type": "priority", + "name": "Sign in - Magic Link | Se connecter - Lien magique", +} + + +def upgrade(): + conn = op.get_bind() + + template_insert = """ + INSERT INTO templates (id, name, template_type, created_at, updated_at, content, service_id, subject, created_by_id, version, archived, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', false, '{}', false) + """ + + template_history_insert = """ + INSERT INTO templates_history (id, name, template_type, created_at, content, archived, service_id, subject, + created_by_id, version, process_type, hidden) + VALUES ('{}', '{}', '{}', '{}', '{}', False, '{}', '{}', '{}', {}, '{}', false) + """ + op.execute( + template_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + 1, + template["process_type"], + ) + ) + + op.execute( + template_history_insert.format( + template["id"], + template["name"], + template["template_type"], + datetime.utcnow(), + template["content"], + current_app.config["NOTIFY_SERVICE_ID"], + template["subject"], + current_app.config["NOTIFY_USER_ID"], + 1, + template["process_type"], + ) + ) + + op.execute("INSERT INTO auth_type (name) VALUES ('magic_link')") + + +def downgrade(): + op.execute("DELETE FROM auth_type WHERE name = 'magic_link'") diff --git a/migrations/versions/0450_enable_pinpoint_provider.py b/migrations/versions/0450_enable_pinpoint_provider.py new 
file mode 100644 index 0000000000..0c2c8247dd --- /dev/null +++ b/migrations/versions/0450_enable_pinpoint_provider.py @@ -0,0 +1,19 @@ +""" + +Revision ID: 0450_enable_pinpoint_provider +Revises: 0449_update_magic_link_auth +Create Date: 2021-01-08 09:03:00 .214680 + +""" +from alembic import op + +revision = "0450_enable_pinpoint_provider" +down_revision = "0449_update_magic_link_auth" + + +def upgrade(): + op.execute("UPDATE provider_details set active=true where identifier in ('pinpoint');") + + +def downgrade(): + op.execute("UPDATE provider_details set active=false where identifier in ('pinpoint');") diff --git a/migrations/versions/0451_create_db_users.py b/migrations/versions/0451_create_db_users.py new file mode 100644 index 0000000000..c16c162db5 --- /dev/null +++ b/migrations/versions/0451_create_db_users.py @@ -0,0 +1,39 @@ +""" + +Revision ID: 0451_create_db_users +Revises: 0450_enable_pinpoint_provider +Create Date: 2024-05-23 12:00:00 + +""" +from alembic import op + +revision = "0451_create_db_users" +down_revision = "0450_enable_pinpoint_provider" + +super_role = "rds_superuser" +roles = ["app_db_user", "quicksight_db_user"] + + +def upgrade(): + create_role_if_not_exist(super_role) + for role in roles: + create_role_if_not_exist(role) + op.execute(f"GRANT {super_role} TO {role} WITH ADMIN OPTION;") + + +def create_role_if_not_exist(role): + """ + Makes sure the expected user exists in the database before performing the GRANT USER operation. + If the user already exists, nothing happens. This is needed so that the migrations can be + run on localhost where the users do not exist. + """ + op.execute( + f""" + DO $$ + BEGIN + CREATE ROLE {role}; + EXCEPTION WHEN duplicate_object THEN RAISE NOTICE '%, skipping', SQLERRM USING ERRCODE = SQLSTATE; + END + $$; + """ + ) diff --git a/migrations/versions/0452_set_pgaudit_config.py b/migrations/versions/0452_set_pgaudit_config.py new file mode 100644 index 0000000000..88f0e87b8e --- /dev/null +++ b/migrations/versions/0452_set_pgaudit_config.py @@ -0,0 +1,53 @@ +""" + +Revision ID: 0452_set_pgaudit_config +Revises: 0451_create_db_users +Create Date: 2024-05-27 12:00:00 + +""" +from alembic import op + +revision = "0452_set_pgaudit_config" +down_revision = "0451_create_db_users" + +users = ["app_db_user", "rdsproxyadmin"] +database_name = op.get_bind().engine.url.database # database name that the migration is being run on + + +def upgrade(): + # Skip this migration in the test database as there are multiple test databases that are created. + # This leads to a race condition attempting to alter the same users multiple times and causes + # sporadic unit test failures. + if "test_notification_api" in database_name: + return + + for user in users: + create_user_if_not_exists(user) + op.execute(f"ALTER USER {user} SET pgaudit.log TO 'NONE'") + + +def downgrade(): + if "test_notification_api" in database_name: + return + + # Reset the pgaudit.log setting + for user in users: + op.execute(f"ALTER USER {user} RESET pgaudit.log") + + +def create_user_if_not_exists(user): + """ + Makes sure the expected user exists in the database before performing the ALTER USER operation. + If the user already exists, nothing happens. This is needed so that the migrations can be + run on localhost where the users do not exist. 
+ """ + op.execute( + f""" + DO $$ + BEGIN + CREATE USER {user}; + EXCEPTION WHEN duplicate_object THEN RAISE NOTICE '%, skipping', SQLERRM USING ERRCODE = SQLSTATE; + END + $$; + """ + ) diff --git a/migrations/versions/0453_set_supports_international.py b/migrations/versions/0453_set_supports_international.py new file mode 100644 index 0000000000..008126b815 --- /dev/null +++ b/migrations/versions/0453_set_supports_international.py @@ -0,0 +1,25 @@ +""" + +Revision ID: 0453_set_supports_international +Revises: 0452_set_pgaudit_config +Create Date: 2024-06-20 14:36:03.038934 + +""" +from alembic import op + +revision = "0453_set_supports_international" +down_revision = "0452_set_pgaudit_config" + + +def upgrade(): + op.execute("UPDATE provider_details SET supports_international=True WHERE identifier='sns'") + op.execute("UPDATE provider_details SET supports_international=True WHERE identifier='pinpoint'") + op.execute("UPDATE provider_details_history SET supports_international=True WHERE identifier='sns'") + op.execute("UPDATE provider_details_history SET supports_international=True WHERE identifier='pinpoint'") + + +def downgrade(): + op.execute("UPDATE provider_details SET supports_international=False WHERE identifier='sns'") + op.execute("UPDATE provider_details SET supports_international=False WHERE identifier='pinpoint'") + op.execute("UPDATE provider_details_history SET supports_international=False WHERE identifier='sns'") + op.execute("UPDATE provider_details_history SET supports_international=False WHERE identifier='pinpoint'") diff --git a/migrations/versions/0454_add_template_category.py b/migrations/versions/0454_add_template_category.py new file mode 100644 index 0000000000..b98339fb9f --- /dev/null +++ b/migrations/versions/0454_add_template_category.py @@ -0,0 +1,85 @@ +""" + +Revision ID: 0454_add_template_categories +Revises: 0453_set_supports_international +Create Date: 2024-06-11 13:32:00 +""" + +from datetime import datetime + +import sqlalchemy as sa +from alembic import op +from flask import current_app +from sqlalchemy.dialects import postgresql + +revision = "0454_add_template_category" +down_revision = "0453_set_supports_international" + + +def upgrade(): + op.create_table( + "template_categories", + sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True, nullable=False), + sa.Column("name_en", sa.String(length=255), nullable=False), + sa.Column("name_fr", sa.String(length=255), nullable=False), + sa.Column("description_en", sa.String(length=255), nullable=True), + sa.Column("description_fr", sa.String(length=255), nullable=True), + sa.Column("sms_process_type", sa.String(length=255), nullable=False), + sa.Column("email_process_type", sa.String(length=255), nullable=False), + sa.Column("hidden", sa.Boolean(), nullable=False), + sa.Column("created_at", sa.DateTime(), server_default=sa.func.now(), nullable=False), + sa.Column("updated_at", sa.DateTime(), server_default=sa.func.now(), nullable=True), + sa.UniqueConstraint("name_en"), + sa.UniqueConstraint("name_fr"), + ) + + # Insert the generic low, medium, and high categories + op.execute( + "INSERT INTO template_categories (id, name_en, name_fr, sms_process_type, email_process_type, hidden, created_at) VALUES ('{}', 'Low Category (Bulk)', 'Catégorie Basse (En Vrac)', 'low', 'low', true, now())".format( + current_app.config["DEFAULT_TEMPLATE_CATEGORY_LOW"], + ) + ) + op.execute( + "INSERT INTO template_categories (id, name_en, name_fr, sms_process_type, email_process_type, hidden, created_at) VALUES ('{}', 
'Medium Category (Normal)', 'Catégorie Moyenne (Normale)', 'low', 'low', true, now())".format( + current_app.config["DEFAULT_TEMPLATE_CATEGORY_MEDIUM"] + ) + ) + op.execute( + "INSERT INTO template_categories (id, name_en, name_fr, sms_process_type, email_process_type, hidden, created_at) VALUES ('{}', 'High Category (Priority)', 'Catégorie Haute (Priorité)', 'low', 'low', true, now())".format( + current_app.config["DEFAULT_TEMPLATE_CATEGORY_HIGH"] + ) + ) + + op.add_column("templates", sa.Column("template_category_id", postgresql.UUID(as_uuid=True), nullable=True)) + op.add_column("templates_history", sa.Column("template_category_id", postgresql.UUID(as_uuid=True), nullable=True)) + op.create_index( + op.f("ix_template_category_id"), + "templates", + ["template_category_id"], + unique=False, + ) + op.create_index( + op.f("ix_template_categories_name_en"), + "template_categories", + ["name_en"], + unique=False, + ) + op.create_index( + op.f("ix_template_categories_name_fr"), + "template_categories", + ["name_fr"], + unique=False, + ) + op.alter_column("templates", "process_type", nullable=True) + op.create_foreign_key("fk_template_template_categories", "templates", "template_categories", ["template_category_id"], ["id"]) + + +def downgrade(): + op.drop_constraint("fk_template_template_categories", "templates", type_="foreignkey") + op.drop_index(op.f("ix_template_category_id"), table_name="templates") + op.drop_index(op.f("ix_template_categories_name_en"), table_name="template_categories") + op.drop_index(op.f("ix_template_categories_name_fr"), table_name="template_categories") + op.alter_column("templates", "process_type", nullable=False) + op.drop_column("templates", "template_category_id") + op.drop_column("templates_history", "template_category_id") + op.drop_table("template_categories") diff --git a/migrations/versions/0455_add_starter_category.py b/migrations/versions/0455_add_starter_category.py new file mode 100644 index 0000000000..efe74bf2d0 --- /dev/null +++ b/migrations/versions/0455_add_starter_category.py @@ -0,0 +1,100 @@ +""" + +Revision ID: 0455_add_starter_category +Revises: 0454_add_template_category +Create Date: 2024-06-11 13:32:00 +""" +from alembic import op + +revision = "0455_add_starter_category" +down_revision = "0454_add_template_category" + +CAT_ALERT_ID = "1d8ce435-a7e5-431b-aaa2-a418bc4d14f9" +CAT_AUTH_ID = "b6c42a7e-2a26-4a07-802b-123a5c3198a9" +CAT_AUTO_ID = "977e2a00-f957-4ff0-92f2-ca3286b24786" +CAT_DECISION_ID = "e81678c0-4897-4111-b9d0-172f6b595f89" +CAT_INFO_ID = "207b293c-2ae5-48e8-836d-fcabd60b2153" +CAT_REMINDER_ID = "edb966f3-4a4c-47a4-96ab-05ff259b919c" +CAT_REQUEST_ID = "e0b8fbe5-f435-4977-8fc8-03f13d9296a5" +CAT_STATUS_ID = "55eb1137-6dc6-4094-9031-f61124a279dc" +CAT_TEST_ID = "7c16aa95-e2e1-4497-81d6-04c656520fe4" + +# List of category IDs +category_ids = [ + CAT_ALERT_ID, + CAT_AUTH_ID, + CAT_AUTO_ID, + CAT_DECISION_ID, + CAT_INFO_ID, + CAT_REMINDER_ID, + CAT_REQUEST_ID, + CAT_STATUS_ID, + CAT_TEST_ID, +] + +# Corresponding English and French names and descriptions and process_type +category_data = [ + ("Alert", "Alerte", "System checks and monitoring", "Contrôles et suivi du système", "medium", "medium"), + ( + "Authentication", + "Authentification", + "Password resets and two factor verification", + "Réinitialisations de mots de passe et vérification à deux facteurs", + "priority", + "priority", + ), + ( + "Automatic reply", + "Réponse automatique", + "No-reply and confirmation messages", + "Messages auxquels il est impossible de répondre et 
messages de confirmation", + "priority", + "priority", + ), + ("Decision", "Décision", "Permits, documents and results", "Permis, documents et résultats", "low", "low"), + ( + "Information blast", + "Information de masse", + "Newsletters, surveys and general information", + "Infolettres, sondages et renseignements généraux", + "bulk", + "bulk", + ), + ("Reminder", "Rappel", "Appointments and deadlines", "Rendez-vous et échéances", "normal", "normal"), + ("Request", "Demande", "Request: Follow up and next steps", "Suivis et prochaines étapes", "normal", "normal"), + ("Status update", "État d’avancement", "Changes and progress", "Changements et progrès", "normal", "normal"), + ("Test", "Test", "Practice messages", "Messages à titre d’entraînement", "bulk", "bulk"), +] + + +def upgrade(): + # Insert new process_type + op.execute("INSERT INTO template_process_type (name) VALUES ('low')") + op.execute("INSERT INTO template_process_type (name) VALUES ('medium')") + op.execute("INSERT INTO template_process_type (name) VALUES ('high')") + + def insert_statement(id, name_en, name_fr, description_en, description_fr, sms_process_type, email_process_type): + # Escape single quotes in string values + name_fr = name_fr.replace("'", "''") + description_fr = description_fr.replace("'", "''") + + return f""" + INSERT INTO template_categories + (id, name_en, name_fr, description_en, description_fr, sms_process_type, email_process_type, hidden, created_at) + VALUES + ('{id}', '{name_en}', '{name_fr}', '{description_en}', '{description_fr}', '{sms_process_type}', '{email_process_type}', false, now()) + """ + + for id, (name_en, name_fr, desc_en, desc_fr, sms_process_type, email_process_type) in zip(category_ids, category_data): + stmt = insert_statement(id, name_en, name_fr, desc_en, desc_fr, sms_process_type, email_process_type) + op.execute(stmt) + + +def downgrade(): + for id in category_ids: + op.execute(f"DELETE FROM template_categories WHERE id = '{id}'") + + # Delete process_type + op.execute("DELETE FROM template_process_type WHERE name = 'low'") + op.execute("DELETE FROM template_process_type WHERE name = 'medium'") + op.execute("DELETE FROM template_process_type WHERE name = 'high'") diff --git a/migrations/versions/0456_update_template_categories.py b/migrations/versions/0456_update_template_categories.py new file mode 100644 index 0000000000..add52423ba --- /dev/null +++ b/migrations/versions/0456_update_template_categories.py @@ -0,0 +1,73 @@ +""" +Revision ID: 0456_update_template_categories +Revises: 0455_add_starter_category +Create Date: 2024-06-11 13:32:00 +""" +import sqlalchemy as sa +from alembic import op + +revision = "0456_update_template_categories" +down_revision = "0455_add_starter_category" + +LOW_CATEGORY_ID = "0dda24c2-982a-4f44-9749-0e38b2607e89" +MEDIUM_CATEGORY_ID = "f75d6706-21b7-437e-b93a-2c0ab771e28e" +HIGH_CATEGORY_ID = "c4f87d7c-a55b-4c0f-91fe-e56c65bb1871" +CAT_ALERT_ID = "1d8ce435-a7e5-431b-aaa2-a418bc4d14f9" +CAT_AUTH_ID = "b6c42a7e-2a26-4a07-802b-123a5c3198a9" +CAT_AUTO_ID = "977e2a00-f957-4ff0-92f2-ca3286b24786" +CAT_DECISION_ID = "e81678c0-4897-4111-b9d0-172f6b595f89" +CAT_INFO_ID = "207b293c-2ae5-48e8-836d-fcabd60b2153" +CAT_REMINDER_ID = "edb966f3-4a4c-47a4-96ab-05ff259b919c" +CAT_REQUEST_ID = "e0b8fbe5-f435-4977-8fc8-03f13d9296a5" +CAT_STATUS_ID = "55eb1137-6dc6-4094-9031-f61124a279dc" +CAT_TEST_ID = "7c16aa95-e2e1-4497-81d6-04c656520fe4" + +SHORT_CODE_CATS = (HIGH_CATEGORY_ID, CAT_AUTH_ID, CAT_AUTO_ID, CAT_DECISION_ID, CAT_REMINDER_ID, CAT_REQUEST_ID, 
CAT_STATUS_ID) +LONG_CODE_CATS = (LOW_CATEGORY_ID, MEDIUM_CATEGORY_ID, CAT_ALERT_ID, CAT_INFO_ID, CAT_TEST_ID) + +sms_options = ("short_code", "long_code") +sms_sending_vehicle = sa.Enum(*sms_options, name="sms_sending_vehicle") + + +def upgrade(): + sms_sending_vehicle.create(op.get_bind(), checkfirst=True) + + op.add_column( + "template_categories", sa.Column("sms_sending_vehicle", sms_sending_vehicle, server_default="long_code", nullable=False) + ) + + # Update the generic categories + op.execute( + "UPDATE template_categories SET sms_process_type = 'bulk', email_process_type = 'bulk' WHERE id = '{}'".format( + LOW_CATEGORY_ID, + ) + ) + op.execute( + "UPDATE template_categories SET sms_process_type = 'normal', email_process_type = 'normal' WHERE id = '{}'".format( + MEDIUM_CATEGORY_ID, + ) + ) + op.execute( + "UPDATE template_categories SET sms_process_type = 'priority', email_process_type = 'priority' WHERE id = '{}'".format( + HIGH_CATEGORY_ID, + ) + ) + + # Update the sms_sending_vehicle for the starter categories + + op.execute( + "UPDATE template_categories SET sms_sending_vehicle = 'short_code' WHERE id in {}".format( + SHORT_CODE_CATS, + ) + ) + + op.execute( + "UPDATE template_categories SET sms_sending_vehicle = 'long_code' WHERE id in {}".format( + LONG_CODE_CATS, + ) + ) + + +def downgrade(): + op.drop_column("template_categories", "sms_sending_vehicle") + sms_sending_vehicle.drop(op.get_bind(), checkfirst=True) diff --git a/mypy.ini b/mypy.ini index bb07df6380..95cf428cd6 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,5 +1,5 @@ [mypy] -python_version = 3.9 +python_version = 3.10 [mypy-pytest.*] ignore_missing_imports = True @@ -25,9 +25,6 @@ ignore_missing_imports = True [mypy-boto3.*] ignore_missing_imports = True -[mypy-marshmallow.*] -ignore_missing_imports = True - [mypy-requests_mock.*] ignore_missing_imports = True @@ -64,21 +61,12 @@ ignore_missing_imports = True [mypy-flask_marshmallow.*] ignore_missing_imports = True -[mypy-marshmallow_sqlalchemy.*] -ignore_missing_imports = True - [mypy-flask_migrate.*] ignore_missing_imports = True [mypy-phonenumbers.*] ignore_missing_imports = True -[mypy-assemblyline_client.*] -ignore_missing_imports = True - -[mypy-app.clients.mlwr.mlwr] -ignore_missing_imports = True - [mypy-newrelic.*] ignore_missing_imports = True @@ -88,8 +76,14 @@ ignore_missing_imports = True [mypy-locust.*] ignore_missing_imports = True +[mypy-flask_redis.*] +ignore_missing_imports = True + [mypy-flask_sqlalchemy.*] ignore_missing_imports = True -[mypy-awsgi.*] +[mypy-simple_salesforce.*] +ignore_missing_imports = True + +[mypy-aws_xray_sdk.*] ignore_missing_imports = True \ No newline at end of file diff --git a/newrelic-layer.zip b/newrelic-layer.zip new file mode 100644 index 0000000000..399e0e1dc1 Binary files /dev/null and b/newrelic-layer.zip differ diff --git a/newrelic.ini b/newrelic.ini new file mode 100644 index 0000000000..eddd873bfd --- /dev/null +++ b/newrelic.ini @@ -0,0 +1,209 @@ +# --------------------------------------------------------------------------- + +# +# This file configures the New Relic Python Agent. +# +# The path to the configuration file should be supplied to the function +# newrelic.agent.initialize() when the agent is being initialized. +# +# The configuration file follows a structure similar to what you would +# find for Microsoft Windows INI files. 
For further information on the +# configuration file format see the Python ConfigParser documentation at: +# +# http://docs.python.org/library/configparser.html +# +# For further discussion on the behaviour of the Python agent that can +# be configured via this configuration file see: +# +# http://newrelic.com/docs/python/python-agent-configuration +# + +# --------------------------------------------------------------------------- + +# Here are the settings that are common to all environments. + +[newrelic] + +# You must specify the license key associated with your New +# Relic account. This key binds the Python Agent's data to your +# account in the New Relic service. +# license_key = *** REPLACE ME *** + +# The application name. Set this to be the name of your +# application as you would like it to show up in New Relic UI. +# The UI will then auto-map instances of your application into a +# entry on your home dashboard page. +# app_name = Python Application + +# When "true", the agent collects performance data about your +# application and reports this data to the New Relic UI at +# newrelic.com. This global switch is normally overridden for +# each environment below. +# monitor_mode = true + +# Sets the name of a file to log agent messages to. Useful for +# debugging any issues with the agent. This is not set by +# default as it is not known in advance what user your web +# application processes will run as and where they have +# permission to write to. Whatever you set this to you must +# ensure that the permissions for the containing directory and +# the file itself are correct, and that the user that your web +# application runs as can write to the file. If not able to +# write out a log file, it is also possible to say "stderr" and +# output to standard error output. This would normally result in +# output appearing in your web server log. +#log_file = /tmp/newrelic-python-agent.log + +# Sets the level of detail of messages sent to the log file, if +# a log file location has been provided. Possible values, in +# increasing order of detail, are: "critical", "error", "warning", +# "info" and "debug". When reporting any agent issues to New +# Relic technical support, the most useful setting for the +# support engineers is "debug". However, this can generate a lot +# of information very quickly, so it is best not to keep the +# agent at this level for longer than it takes to reproduce the +# problem you are experiencing. +# log_level = info + +# The Python Agent communicates with the New Relic service using +# SSL by default. Note that this does result in an increase in +# CPU overhead, over and above what would occur for a non SSL +# connection, to perform the encryption involved in the SSL +# communication. This work is though done in a distinct thread +# to those handling your web requests, so it should not impact +# response times. You can if you wish revert to using a non SSL +# connection, but this will result in information being sent +# over a plain socket connection and will not be as secure. +# ssl = true + +# High Security Mode enforces certain security settings, and +# prevents them from being overridden, so that no sensitive data +# is sent to New Relic. Enabling High Security Mode means that +# SSL is turned on, request parameters are not collected, and SQL +# can not be sent to New Relic in its raw form. 
To activate High +# Security Mode, it must be set to 'true' in this local .ini +# configuration file AND be set to 'true' in the server-side +# configuration in the New Relic user interface. For details, see +# https://docs.newrelic.com/docs/subscriptions/high-security +# high_security = false + +# The Python Agent will attempt to connect directly to the New +# Relic service. If there is an intermediate firewall between +# your host and the New Relic service that requires you to use a +# HTTP proxy, then you should set both the "proxy_host" and +# "proxy_port" settings to the required values for the HTTP +# proxy. The "proxy_user" and "proxy_pass" settings should +# additionally be set if proxy authentication is implemented by +# the HTTP proxy. The "proxy_scheme" setting dictates what +# protocol scheme is used in talking to the HTTP proxy. This +# would normally always be set as "http" which will result in the +# agent then using a SSL tunnel through the HTTP proxy for end to +# end encryption. +# proxy_scheme = http +# proxy_host = hostname +# proxy_port = 8080 +# proxy_user = +# proxy_pass = + +# Capturing request parameters is off by default. To enable the +# capturing of request parameters, first ensure that the setting +# "attributes.enabled" is set to "true" (the default value), and +# then add "request.parameters.*" to the "attributes.include" +# setting. For details about attributes configuration, please +# consult the documentation. +# attributes.include = request.parameters.* + +# The transaction tracer captures deep information about slow +# transactions and sends this to the UI on a periodic basis. The +# transaction tracer is enabled by default. Set this to "false" +# to turn it off. +# transaction_tracer.enabled = true + +# Threshold in seconds for when to collect a transaction trace. +# When the response time of a controller action exceeds this +# threshold, a transaction trace will be recorded and sent to +# the UI. Valid values are any positive float value, or (default) +# "apdex_f", which will use the threshold for a dissatisfying +# Apdex controller action - four times the Apdex T value. +# transaction_tracer.transaction_threshold = apdex_f + +# When the transaction tracer is on, SQL statements can +# optionally be recorded. The recorder has three modes, "off" +# which sends no SQL, "raw" which sends the SQL statement in its +# original form, and "obfuscated", which strips out numeric and +# string literals. +# transaction_tracer.record_sql = obfuscated + +# Threshold in seconds for when to collect stack trace for a SQL +# call. In other words, when SQL statements exceed this +# threshold, then capture and send to the UI the current stack +# trace. This is helpful for pinpointing where long SQL calls +# originate from in an application. +# transaction_tracer.stack_trace_threshold = 0.5 + +# Determines whether the agent will capture query plans for slow +# SQL queries. Only supported in MySQL and PostgreSQL. Set this +# to "false" to turn it off. +# transaction_tracer.explain_enabled = true + +# Threshold for query execution time below which query plans +# will not not be captured. Relevant only when "explain_enabled" +# is true. +# transaction_tracer.explain_threshold = 0.5 + +# Space separated list of function or method names in form +# 'module:function' or 'module:class.function' for which +# additional function timing instrumentation will be added. 
+# transaction_tracer.function_trace = + +# The error collector captures information about uncaught +# exceptions or logged exceptions and sends them to UI for +# viewing. The error collector is enabled by default. Set this +# to "false" to turn it off. +error_collector.enabled = true + +# To stop specific errors from reporting to the UI, set this to +# a space separated list of the Python exception type names to +# ignore. The exception name should be of the form 'module:class'. +error_collector.ignore_errors = app.v2.errors:BadRequestError jsonschema.exceptions:ValidationError + +# Browser monitoring is the Real User Monitoring feature of the UI. +# For those Python web frameworks that are supported, this +# setting enables the auto-insertion of the browser monitoring +# JavaScript fragments. +# browser_monitoring.auto_instrument = true + +# A thread profiling session can be scheduled via the UI when +# this option is enabled. The thread profiler will periodically +# capture a snapshot of the call stack for each active thread in +# the application to construct a statistically representative +# call tree. +# thread_profiler.enabled = true + +# --------------------------------------------------------------------------- + +# +# The application environments. These are specific settings which +# override the common environment settings. The settings related to a +# specific environment will be used when the environment argument to the +# newrelic.agent.initialize() function has been defined to be either +# "development", "test", "staging" or "production". +# + +[newrelic:development] +# monitor_mode = false + +[newrelic:staging] +# app_name = Python Application (Staging) +# monitor_mode = true + +[newrelic:production] +# monitor_mode = true + +[newrelic:scratch] +# monitor_mode = false + +[newrelic:dev] +# monitor_mode = false + +# --------------------------------------------------------------------------- diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000000..9afdaa8ac3 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,4295 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
+ +[[package]] +name = "aiohttp" +version = "3.9.1" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, + {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, + {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, + {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, + {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, + {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, + {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, + {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, + {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, + {file = 
"aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, + {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, + {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, + {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "alembic" +version = "1.12.1" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.7" +files = [ + {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, + {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["python-dateutil"] + +[[package]] +name = "amqp" +version = "5.2.0" +description = "Low-level AMQP client for Python (fork of amqplib)." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, + {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, +] + +[package.dependencies] +vine = ">=5.0.0,<6.0.0" + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "apig-wsgi" +version = "2.18.0" +description = "Wrap a WSGI application in an AWS Lambda handler function for running on API Gateway or an ALB." +optional = false +python-versions = ">=3.8" +files = [ + {file = "apig_wsgi-2.18.0-py3-none-any.whl", hash = "sha256:9132330bd1b1e9d1365cc1873e36b508930db65c5ed4d35f6234d332a6453fb9"}, + {file = "apig_wsgi-2.18.0.tar.gz", hash = "sha256:7e46eb15b32f644caeb8065ad494beae4f3114dc10009384db9191f2b16eceec"}, +] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "aws-embedded-metrics" +version = "1.0.8" +description = "AWS Embedded Metrics Package" +optional = false +python-versions = "*" +files = [ + {file = "aws-embedded-metrics-1.0.8.tar.gz", hash = "sha256:8b8f404f36d18dc4e5fd89188fffd29ba37ef898f8d8377e71b3257dd98fa03e"}, + {file = "aws_embedded_metrics-1.0.8-py3-none-any.whl", hash = "sha256:58bac9ffae8131ecffe888934b1fb67329eb76a7482e330f8eec6f5f2ac3c7fe"}, +] + +[package.dependencies] +aiohttp = "*" + +[[package]] +name = "aws-xray-sdk" +version = "2.14.0" +description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "aws_xray_sdk-2.14.0-py2.py3-none-any.whl", hash = "sha256:cfbe6feea3d26613a2a869d14c9246a844285c97087ad8f296f901633554ad94"}, + {file = "aws_xray_sdk-2.14.0.tar.gz", hash = "sha256:aab843c331af9ab9ba5cefb3a303832a19db186140894a523edafc024cc0493c"}, +] + +[package.dependencies] +botocore = ">=1.11.3" +wrapt = "*" + +[[package]] +name = "awscli" +version = "1.33.5" +description = "Universal Command Line Environment for AWS." +optional = false +python-versions = ">=3.8" +files = [ + {file = "awscli-1.33.5-py3-none-any.whl", hash = "sha256:46eb5858f154723d3d11900b33035f24b51882758d5f3f753e472ca12375bc46"}, + {file = "awscli-1.33.5.tar.gz", hash = "sha256:eda29ad39b0907505f78d693e6cc1dc76c2d47a0e5cf5376e86a791d5e830535"}, +] + +[package.dependencies] +botocore = "1.34.123" +colorama = ">=0.2.5,<0.4.7" +docutils = ">=0.10,<0.17" +PyYAML = ">=3.10,<6.1" +rsa = ">=3.1.2,<4.8" +s3transfer = ">=0.10.0,<0.11.0" + +[[package]] +name = "awscli-cwlogs" +version = "1.4.6" +description = "AWSCLI CloudWatch Logs plugin" +optional = false +python-versions = "*" +files = [ + {file = "awscli-cwlogs-1.4.6.tar.gz", hash = "sha256:44d2fe77d109b7b630fb8f6c06760ff6c5ec9861be16413fd5b977f5a4971f83"}, +] + +[package.dependencies] +awscli = ">=1.11.41" +python-dateutil = ">=2.1" +requests = ">=2.18.0" +six = ">=1.1.0" + +[[package]] +name = "bcrypt" +version = "4.1.1" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "bcrypt-4.1.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:196008d91201bbb1aa4e666fee5e610face25d532e433a560cabb33bfdff958b"}, + {file = "bcrypt-4.1.1-cp37-abi3-macosx_13_0_universal2.whl", hash = "sha256:2e197534c884336f9020c1f3a8efbaab0aa96fc798068cb2da9c671818b7fbb0"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d573885b637815a7f3a3cd5f87724d7d0822da64b0ab0aa7f7c78bae534e86dc"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bab33473f973e8058d1b2df8d6e095d237c49fbf7a02b527541a86a5d1dc4444"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fb931cd004a7ad36a89789caf18a54c20287ec1cd62161265344b9c4554fdb2e"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:12f40f78dcba4aa7d1354d35acf45fae9488862a4fb695c7eeda5ace6aae273f"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2ade10e8613a3b8446214846d3ddbd56cfe9205a7d64742f0b75458c868f7492"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f33b385c3e80b5a26b3a5e148e6165f873c1c202423570fdf45fe34e00e5f3e5"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:755b9d27abcab678e0b8fb4d0abdebeea1f68dd1183b3f518bad8d31fa77d8be"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a7a7b8a87e51e5e8ca85b9fdaf3a5dc7aaf123365a09be7a27883d54b9a0c403"}, + {file = "bcrypt-4.1.1-cp37-abi3-win32.whl", hash = "sha256:3d6c4e0d6963c52f8142cdea428e875042e7ce8c84812d8e5507bd1e42534e07"}, + {file = "bcrypt-4.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:14d41933510717f98aac63378b7956bbe548986e435df173c841d7f2bd0b2de7"}, + {file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24c2ebd287b5b11016f31d506ca1052d068c3f9dc817160628504690376ff050"}, + {file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:476aa8e8aca554260159d4c7a97d6be529c8e177dbc1d443cb6b471e24e82c74"}, + {file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12611c4b0a8b1c461646228344784a1089bc0c49975680a2f54f516e71e9b79e"}, + {file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6450538a0fc32fb7ce4c6d511448c54c4ff7640b2ed81badf9898dcb9e5b737"}, + {file = "bcrypt-4.1.1.tar.gz", hash = "sha256:df37f5418d4f1cdcff845f60e747a015389fa4e63703c918330865e06ad80007"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "billiard" +version = "4.2.0" +description = "Python multiprocessing fork with improvements and bugfixes" +optional = false +python-versions = ">=3.7" +files = [ + {file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"}, + {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, +] + +[[package]] +name = "black" +version = "23.12.1" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = 
"black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + +[[package]] +name = "blinker" +version = "1.7.0" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, + {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, +] + +[[package]] +name = "boto" +version = "2.49.0" +description = "Amazon Web Services Library" +optional = false +python-versions = "*" +files = [ + {file = "boto-2.49.0-py2.py3-none-any.whl", hash = "sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8"}, + {file = "boto-2.49.0.tar.gz", hash = "sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a"}, +] + +[[package]] +name = "boto3" +version = "1.34.100" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.34.100-py3-none-any.whl", hash = "sha256:bbe2bb0dfcd92380da2a2fa2c2f586ba06c118b796380b2d0f3d0ebd103ec28d"}, + {file = "boto3-1.34.100.tar.gz", hash = "sha256:016f6d66900bb1a835dea2063f1e91fc7057dbf7fb7df8add0706f0da9492631"}, +] + +[package.dependencies] +botocore = ">=1.34.100,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.34.123" +description = "Low-level, data-driven core of boto 3." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.34.123-py3-none-any.whl", hash = "sha256:8c34ada2a708c82e7174bff700611643db7ce2cb18f1130c35045c24310d299d"}, + {file = "botocore-1.34.123.tar.gz", hash = "sha256:a8577f6574600c4d159b5cd103ee05744a443d77f7778304e17307940b369c4f"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.20.11)"] + +[[package]] +name = "brotli" +version = "1.1.0" +description = "Python bindings for the Brotli compression library" +optional = false +python-versions = "*" +files = [ + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"}, + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, + {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, + {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, + {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, + {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, + {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, + {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, + {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, + {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, + {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, + {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, + {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, + {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"}, + {file = 
"Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, + {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, + {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, + {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, + {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, + {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, +] + +[[package]] +name = "cachelib" +version = "0.12.0" +description = "A collection of cache libraries in the same API interface." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cachelib-0.12.0-py3-none-any.whl", hash = "sha256:038f4d855afc3eb8caab10458f6eac55c328911f9055824c22c2f259ef9ed3a3"}, + {file = "cachelib-0.12.0.tar.gz", hash = "sha256:8243029a028436fd23229113dee517c0700bb43a8a289ec5a963e4af9ca2b194"}, +] + +[[package]] +name = "cachetools" +version = "4.2.4" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = "~=3.5" +files = [ + {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, + {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, +] + +[[package]] +name = "celery" +version = "5.3.6" +description = "Distributed Task Queue." +optional = false +python-versions = ">=3.8" +files = [ + {file = "celery-5.3.6-py3-none-any.whl", hash = "sha256:9da4ea0118d232ce97dff5ed4974587fb1c0ff5c10042eb15278487cdd27d1af"}, + {file = "celery-5.3.6.tar.gz", hash = "sha256:870cc71d737c0200c397290d730344cc991d13a057534353d124c9380267aab9"}, +] + +[package.dependencies] +billiard = ">=4.2.0,<5.0" +boto3 = {version = ">=1.26.143", optional = true, markers = "extra == \"sqs\""} +click = ">=8.1.2,<9.0" +click-didyoumean = ">=0.3.0" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +kombu = [ + {version = ">=5.3.4,<6.0"}, + {version = ">=5.3.0", extras = ["sqs"], optional = true, markers = "extra == \"sqs\""}, +] +pycurl = {version = ">=7.43.0.5", optional = true, markers = "sys_platform != \"win32\" and platform_python_implementation == \"CPython\" and extra == \"sqs\""} +python-dateutil = ">=2.8.2" +tzdata = ">=2022.7" +urllib3 = {version = ">=1.26.16", optional = true, markers = "extra == \"sqs\""} +vine = ">=5.1.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=2.0.2)"] +auth = ["cryptography (==41.0.5)"] +azureblockblob = ["azure-storage-blob (>=12.15.0)"] +brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +cassandra = ["cassandra-driver (>=3.25.0,<4)"] +consul = ["python-consul2 (==0.1.5)"] +cosmosdbsql = ["pydocumentdb (==2.3.5)"] +couchbase = ["couchbase (>=3.0.0)"] +couchdb = ["pycouchdb (==1.14.2)"] +django = ["Django (>=2.2.28)"] +dynamodb = ["boto3 (>=1.26.143)"] +elasticsearch = ["elastic-transport (<=8.10.0)", "elasticsearch (<=8.11.0)"] +eventlet = ["eventlet (>=0.32.0)"] +gevent = ["gevent (>=1.5.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +memcache = ["pylibmc (==1.6.3)"] +mongodb = ["pymongo[srv] (>=4.0.2)"] +msgpack = ["msgpack (==1.0.7)"] +pymemcache = ["python-memcached (==1.59)"] +pyro = ["pyro4 (==4.82)"] +pytest = ["pytest-celery (==0.0.0)"] +redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"] +s3 = ["boto3 (>=1.26.143)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = ["ephem (==4.1.5)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.0)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] +tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard (==0.22.0)"] + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = 
"cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + 
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-datetime" +version = "0.2" +description = "Datetime type support for click." 
+optional = false +python-versions = "*" +files = [ + {file = "click-datetime-0.2.tar.gz", hash = "sha256:c562ad24b3711784a655a49141b4a87933a78608fe66296259acae95fda5e115"}, + {file = "click_datetime-0.2-py2.py3-none-any.whl", hash = "sha256:7256ca518e648ada8e2550239ab328de125906e5b7199a5bd5bcbb4dfe28f946"}, +] + +[package.dependencies] +click = "*" + +[package.extras] +dev = ["wheel"] + +[[package]] +name = "click-didyoumean" +version = "0.3.0" +description = "Enables git-like *did-you-mean* feature in click" +optional = false +python-versions = ">=3.6.2,<4.0.0" +files = [ + {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, + {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, +] + +[package.dependencies] +click = ">=7" + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +optional = false +python-versions = "*" +files = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "click-repl" +version = "0.3.0" +description = "REPL plugin for Click" +optional = false +python-versions = ">=3.6" +files = [ + {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, + {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, +] + +[package.dependencies] +click = ">=7.0" +prompt-toolkit = ">=3.0.36" + +[package.extras] +testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] + +[[package]] +name = "configargparse" +version = "1.7" +description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "ConfigArgParse-1.7-py3-none-any.whl", hash = "sha256:d249da6591465c6c26df64a9f73d2536e743be2f244eb3ebe61114af2f94f86b"}, + {file = "ConfigArgParse-1.7.tar.gz", hash = "sha256:e7067471884de5478c58a511e529f0f9bd1c66bfef1dea90935438d6c23306d1"}, +] + +[package.extras] +test = ["PyYAML", "mock", "pytest"] +yaml = ["PyYAML"] + +[[package]] +name = "coverage" +version = "5.5" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +files = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + 
{file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = 
"sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] + +[package.dependencies] +toml = {version = "*", optional = true, markers = "extra == \"toml\""} + +[package.extras] +toml = ["toml"] + +[[package]] +name = "coveralls" +version = "3.3.1" +description = "Show coverage stats online via coveralls.io" +optional = false +python-versions = ">= 3.5" +files = [ + {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"}, + {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"}, +] + +[package.dependencies] +coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0" +docopt = ">=0.6.1" +requests = ">=1.0.0" + +[package.extras] +yaml = ["PyYAML (>=3.10)"] + +[[package]] +name = "cryptography" +version = "42.0.3" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:de5086cd475d67113ccb6f9fae6d8fe3ac54a4f9238fd08bfdb07b03d791ff0a"}, + {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:935cca25d35dda9e7bd46a24831dfd255307c55a07ff38fd1a92119cffc34857"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20100c22b298c9eaebe4f0b9032ea97186ac2555f426c3e70670f2517989543b"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eb6368d5327d6455f20327fb6159b97538820355ec00f8cc9464d617caecead"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:39d5c93e95bcbc4c06313fc6a500cee414ee39b616b55320c1904760ad686938"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d96ea47ce6d0055d5b97e761d37b4e84195485cb5a38401be341fabf23bc32a"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d1998e545081da0ab276bcb4b33cce85f775adb86a516e8f55b3dac87f469548"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93fbee08c48e63d5d1b39ab56fd3fdd02e6c2431c3da0f4edaf54954744c718f"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:90147dad8c22d64b2ff7331f8d4cddfdc3ee93e4879796f837bdbb2a0b141e0c"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4dcab7c25e48fc09a73c3e463d09ac902a932a0f8d0c568238b3696d06bf377b"}, + {file = "cryptography-42.0.3-cp37-abi3-win32.whl", hash = "sha256:1e935c2900fb53d31f491c0de04f41110351377be19d83d908c1fd502ae8daa5"}, + {file = "cryptography-42.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:762f3771ae40e111d78d77cbe9c1035e886ac04a234d3ee0856bf4ecb3749d54"}, + {file = 
"cryptography-42.0.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3ec384058b642f7fb7e7bff9664030011ed1af8f852540c76a1317a9dd0d20"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35772a6cffd1f59b85cb670f12faba05513446f80352fe811689b4e439b5d89e"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04859aa7f12c2b5f7e22d25198ddd537391f1695df7057c8700f71f26f47a129"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c3d1f5a1d403a8e640fa0887e9f7087331abb3f33b0f2207d2cc7f213e4a864c"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df34312149b495d9d03492ce97471234fd9037aa5ba217c2a6ea890e9166f151"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:de4ae486041878dc46e571a4c70ba337ed5233a1344c14a0790c4c4be4bbb8b4"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0fab2a5c479b360e5e0ea9f654bcebb535e3aa1e493a715b13244f4e07ea8eec"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25b09b73db78facdfd7dd0fa77a3f19e94896197c86e9f6dc16bce7b37a96504"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d5cf11bc7f0b71fb71af26af396c83dfd3f6eed56d4b6ef95d57867bf1e4ba65"}, + {file = "cryptography-42.0.3-cp39-abi3-win32.whl", hash = "sha256:0fea01527d4fb22ffe38cd98951c9044400f6eff4788cf52ae116e27d30a1ba3"}, + {file = "cryptography-42.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:2619487f37da18d6826e27854a7f9d4d013c51eafb066c80d09c63cf24505306"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ead69ba488f806fe1b1b4050febafdbf206b81fa476126f3e16110c818bac396"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:20180da1b508f4aefc101cebc14c57043a02b355d1a652b6e8e537967f1e1b46"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fbf0f3f0fac7c089308bd771d2c6c7b7d53ae909dce1db52d8e921f6c19bb3a"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c23f03cfd7d9826cdcbad7850de67e18b4654179e01fe9bc623d37c2638eb4ef"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db0480ffbfb1193ac4e1e88239f31314fe4c6cdcf9c0b8712b55414afbf80db4"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:6c25e1e9c2ce682d01fc5e2dde6598f7313027343bd14f4049b82ad0402e52cd"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9541c69c62d7446539f2c1c06d7046aef822940d248fa4b8962ff0302862cc1f"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b797099d221df7cce5ff2a1d272761d1554ddf9a987d3e11f6459b38cd300fd"}, + {file = "cryptography-42.0.3.tar.gz", hash = "sha256:069d2ce9be5526a44093a0991c450fe9906cdf069e0e7cd67d9dee49a62b9ebe"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", 
"pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "docopt" +version = "0.6.2" +description = "Pythonic argument parser, that will make you smile" +optional = false +python-versions = "*" +files = [ + {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +] + +[[package]] +name = "docutils" +version = "0.16" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, + {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, +] + +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +optional = false +python-versions = ">=3.6" +files = [ + {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, + {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, +] + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "2.0.2" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.7" +files = [ + {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, + {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + +[[package]] +name = "fido2" +version = "0.9.3" +description = "Python based FIDO 2.0 library" +optional = false +python-versions = ">=2.7.6,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +files = [ + {file = "fido2-0.9.3.tar.gz", hash = "sha256:b45e89a6109cfcb7f1bb513776aa2d6408e95c4822f83a253918b944083466ec"}, +] + +[package.dependencies] +cryptography = ">=1.5" +six = "*" + +[package.extras] +pcsc = ["pyscard"] + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "flask" +version = "2.3.3" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, + {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=2.3.7" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-bcrypt" +version = "1.0.1" +description = "Brcrypt hashing for Flask." +optional = false +python-versions = "*" +files = [ + {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, + {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, +] + +[package.dependencies] +bcrypt = ">=3.1.1" +Flask = "*" + +[[package]] +name = "flask-cors" +version = "4.0.0" +description = "A Flask extension adding a decorator for CORS support" +optional = false +python-versions = "*" +files = [ + {file = "Flask-Cors-4.0.0.tar.gz", hash = "sha256:f268522fcb2f73e2ecdde1ef45e2fd5c71cc48fe03cffb4b441c6d1b40684eb0"}, + {file = "Flask_Cors-4.0.0-py2.py3-none-any.whl", hash = "sha256:bc3492bfd6368d27cfe79c7821df5a8a319e1a6d5eab277a3794be19bdc51783"}, +] + +[package.dependencies] +Flask = ">=0.9" + +[[package]] +name = "flask-login" +version = "0.6.3" +description = "User authentication and session management for Flask." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333"}, + {file = "Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d"}, +] + +[package.dependencies] +Flask = ">=1.0.4" +Werkzeug = ">=1.0.1" + +[[package]] +name = "flask-marshmallow" +version = "0.14.0" +description = "Flask + marshmallow for beautiful APIs" +optional = false +python-versions = "*" +files = [ + {file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"}, + {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"}, +] + +[package.dependencies] +Flask = "*" +marshmallow = ">=2.0.0" +six = ">=1.9.0" + +[package.extras] +dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] +docs = ["Sphinx (==3.2.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"] +lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"] +sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"] +tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] + +[[package]] +name = "flask-migrate" +version = "2.7.0" +description = "SQLAlchemy database migrations for Flask applications using Alembic" +optional = false +python-versions = "*" +files = [ + {file = "Flask-Migrate-2.7.0.tar.gz", hash = "sha256:ae2f05671588762dd83a21d8b18c51fe355e86783e24594995ff8d7380dffe38"}, + {file = "Flask_Migrate-2.7.0-py2.py3-none-any.whl", hash = "sha256:26871836a4e46d2d590cf8e558c6d60039e1c003079b240689d845726b6b57c0"}, +] + +[package.dependencies] +alembic = ">=0.7" +Flask = ">=0.9" +Flask-SQLAlchemy = ">=1.0" + +[[package]] +name = "flask-redis" +version = "0.4.0" +description = "A nice way to use Redis in your Flask app" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "flask-redis-0.4.0.tar.gz", hash = "sha256:e1fccc11e7ea35c2a4d68c0b9aa58226a098e45e834d615c7b6c4928b01ddd6c"}, + {file = "flask_redis-0.4.0-py2.py3-none-any.whl", hash = "sha256:8d79eef4eb1217095edab603acc52f935b983ae4b7655ee7c82c0dfd87315d17"}, +] + +[package.dependencies] +Flask = ">=0.8" +redis = ">=2.7.6" + +[package.extras] +dev = ["coverage", "pre-commit", "pytest", "pytest-mock"] +tests = ["coverage", "pytest", "pytest-mock"] + +[[package]] +name = "Flask-SQLAlchemy" +version = "2.3.2.dev20231128" +description = "Adds SQLAlchemy support to your Flask application" +optional = false +python-versions = "*" +files = [] +develop = false + +[package.dependencies] +Flask = ">=0.10" +SQLAlchemy = ">=0.8.0" + +[package.source] +type = "git" +url = "https://github.com/pallets-eco/flask-sqlalchemy.git" +reference = "500e732dd1b975a56ab06a46bd1a20a21e682262" +resolved_reference = "500e732dd1b975a56ab06a46bd1a20a21e682262" + +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = 
"sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "frozenlist" +version = "1.4.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, + {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, + {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, + {file = 
"frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, + {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, + {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, + {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, + {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, + {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, + {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, + {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, +] + +[[package]] +name = "future" +version = "0.18.3" +description = "Clean single-source support for Python 3 and 2" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, +] + +[[package]] +name = "gevent" +version = "23.9.1" +description = "Coroutine-based network 
library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "gevent-23.9.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:a3c5e9b1f766a7a64833334a18539a362fb563f6c4682f9634dea72cbe24f771"}, + {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b101086f109168b23fa3586fccd1133494bdb97f86920a24dc0b23984dc30b69"}, + {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36a549d632c14684bcbbd3014a6ce2666c5f2a500f34d58d32df6c9ea38b6535"}, + {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:272cffdf535978d59c38ed837916dfd2b5d193be1e9e5dcc60a5f4d5025dd98a"}, + {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcb8612787a7f4626aa881ff15ff25439561a429f5b303048f0fca8a1c781c39"}, + {file = "gevent-23.9.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d57737860bfc332b9b5aa438963986afe90f49645f6e053140cfa0fa1bdae1ae"}, + {file = "gevent-23.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5f3c781c84794926d853d6fb58554dc0dcc800ba25c41d42f6959c344b4db5a6"}, + {file = "gevent-23.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dbb22a9bbd6a13e925815ce70b940d1578dbe5d4013f20d23e8a11eddf8d14a7"}, + {file = "gevent-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:707904027d7130ff3e59ea387dddceedb133cc742b00b3ffe696d567147a9c9e"}, + {file = "gevent-23.9.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:45792c45d60f6ce3d19651d7fde0bc13e01b56bb4db60d3f32ab7d9ec467374c"}, + {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e24c2af9638d6c989caffc691a039d7c7022a31c0363da367c0d32ceb4a0648"}, + {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1ead6863e596a8cc2a03e26a7a0981f84b6b3e956101135ff6d02df4d9a6b07"}, + {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65883ac026731ac112184680d1f0f1e39fa6f4389fd1fc0bf46cc1388e2599f9"}, + {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7af500da05363e66f122896012acb6e101a552682f2352b618e541c941a011"}, + {file = "gevent-23.9.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c3e5d2fa532e4d3450595244de8ccf51f5721a05088813c1abd93ad274fe15e7"}, + {file = "gevent-23.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c84d34256c243b0a53d4335ef0bc76c735873986d478c53073861a92566a8d71"}, + {file = "gevent-23.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ada07076b380918829250201df1d016bdafb3acf352f35e5693b59dceee8dd2e"}, + {file = "gevent-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:921dda1c0b84e3d3b1778efa362d61ed29e2b215b90f81d498eb4d8eafcd0b7a"}, + {file = "gevent-23.9.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ed7a048d3e526a5c1d55c44cb3bc06cfdc1947d06d45006cc4cf60dedc628904"}, + {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c1abc6f25f475adc33e5fc2dbcc26a732608ac5375d0d306228738a9ae14d3b"}, + {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4368f341a5f51611411ec3fc62426f52ac3d6d42eaee9ed0f9eebe715c80184e"}, + {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52b4abf28e837f1865a9bdeef58ff6afd07d1d888b70b6804557e7908032e599"}, + {file = 
"gevent-23.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52e9f12cd1cda96603ce6b113d934f1aafb873e2c13182cf8e86d2c5c41982ea"}, + {file = "gevent-23.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:de350fde10efa87ea60d742901e1053eb2127ebd8b59a7d3b90597eb4e586599"}, + {file = "gevent-23.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fde6402c5432b835fbb7698f1c7f2809c8d6b2bd9d047ac1f5a7c1d5aa569303"}, + {file = "gevent-23.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dd6c32ab977ecf7c7b8c2611ed95fa4aaebd69b74bf08f4b4960ad516861517d"}, + {file = "gevent-23.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:455e5ee8103f722b503fa45dedb04f3ffdec978c1524647f8ba72b4f08490af1"}, + {file = "gevent-23.9.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:7ccf0fd378257cb77d91c116e15c99e533374a8153632c48a3ecae7f7f4f09fe"}, + {file = "gevent-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d163d59f1be5a4c4efcdd13c2177baaf24aadf721fdf2e1af9ee54a998d160f5"}, + {file = "gevent-23.9.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7532c17bc6c1cbac265e751b95000961715adef35a25d2b0b1813aa7263fb397"}, + {file = "gevent-23.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:78eebaf5e73ff91d34df48f4e35581ab4c84e22dd5338ef32714264063c57507"}, + {file = "gevent-23.9.1-cp38-cp38-win32.whl", hash = "sha256:f632487c87866094546a74eefbca2c74c1d03638b715b6feb12e80120960185a"}, + {file = "gevent-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:62d121344f7465e3739989ad6b91f53a6ca9110518231553fe5846dbe1b4518f"}, + {file = "gevent-23.9.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:bf456bd6b992eb0e1e869e2fd0caf817f0253e55ca7977fd0e72d0336a8c1c6a"}, + {file = "gevent-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43daf68496c03a35287b8b617f9f91e0e7c0d042aebcc060cadc3f049aadd653"}, + {file = "gevent-23.9.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7c28e38dcde327c217fdafb9d5d17d3e772f636f35df15ffae2d933a5587addd"}, + {file = "gevent-23.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fae8d5b5b8fa2a8f63b39f5447168b02db10c888a3e387ed7af2bd1b8612e543"}, + {file = "gevent-23.9.1-cp39-cp39-win32.whl", hash = "sha256:2c7b5c9912378e5f5ccf180d1fdb1e83f42b71823483066eddbe10ef1a2fcaa2"}, + {file = "gevent-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:a2898b7048771917d85a1d548fd378e8a7b2ca963db8e17c6d90c76b495e0e2b"}, + {file = "gevent-23.9.1.tar.gz", hash = "sha256:72c002235390d46f94938a96920d8856d4ffd9ddf62a303a0d7c118894097e34"}, +] + +[package.dependencies] +cffi = {version = ">=1.12.2", markers = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} +greenlet = {version = ">=2.0.0", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""} +"zope.event" = "*" +"zope.interface" = "*" + +[package.extras] +dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"] +docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"] +monitor = ["psutil (>=5.7.0)"] +recommended = ["cffi (>=1.12.2)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"] +test = ["cffi (>=1.12.2)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests", "setuptools"] + +[[package]] +name = "geventhttpclient" +version = "2.0.11" +description = "http client library for gevent" +optional = false +python-versions = "*" +files = [ + {file = 
"geventhttpclient-2.0.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f509176bc7754b1181375a25ec6909425a5997e58c98ea29a36fe8b6a376852f"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cda51b46d8ab3993763a394ed6601137c32f70cff78dfe703edecb3dfa143009"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:072f24198c0f179fcd8567e9270d5cb78ceea1d562a55b052cd083cf4c67feef"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b97c84e9be76bdd726757437327be5446710eafb64f7097d8d86db9c0f7d280"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:abb32554c1ad103ed1114cee3d75fa6a3c5d8a0898e4e64db68f3fc0f11fb0de"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78a7e493e09d0aa4ba9651147d02fc555159371fecab0e4e96196c72f191322e"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e521089a3a95c98e1742f1a1ea41568b029bc2528cc6fc7ab91bb5d416f1f2c"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8329c60d94e688d75ec1c6f67a77ab96f726f8ea562a8d48afa1ed6470334a6f"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:572364fc4acd7ff2e77641e6bd1e64cf315d899a7fc48953eac1dd3b6865fd99"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:81e73ee32f4217072935825a0bad7264dc803b0d24cc4e2f4bfcac3fff49a899"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d79ee0d7ab5d775b056400155cab1e3547a7fa6511f6098e25613ed8705ae8b8"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-win32.whl", hash = "sha256:2911d3657e2426b6a2d59af0b52285c1a7c4a78d0e4d03ee4ec1d5195a25a09f"}, + {file = "geventhttpclient-2.0.11-cp310-cp310-win_amd64.whl", hash = "sha256:a489573a0a0559f8960b38795dc53d1e222bc0978b211763d1303b2f94e4c3e0"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1e27a9521e0ad0d97d0ff81578fd4dd6ae9eee8095d46edb820dfda33c0bd233"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d54b886ce042186a4f731dcbcb4ffa8d674b0542907fc72de20d0b5088adc252"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2337e10e2ad20970436f216d7b3b8d1503f8e4645d439173a98b4b418fe5768"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f41bcdcec859264a1b6cc7c57bdb9411da8047f17b982cb62756bcc74a1b045b"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f5d73be013a7a2a357eb27d18e5990c773365f63f50a43eaf357d6efb1fd46a6"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4d86f042501a783e94188ef8b099f32bc4680f2423bbbb56f40158d4556a56b"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaa2bc31a38dbb387c7539cfa03d3bafaa32151972d34b42f2f648b66778e128"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:3e24ff4c398f9e49c5c0740585f12fcf7033dc27a20ec884f3b2c729e2f47f14"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b70f80528ae74518a16214261abba2a276739e6e35ce518fdbd8be2a3f42f93a"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:efa467997f87d39f774ed1916a9e184c9a936f8fa90ab1a8ebf97aba2ee7ed63"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4597ea18ddc9838dc0e6cb9d5efb812191f2ca65ab38c115a56894045c73ea40"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-win32.whl", hash = "sha256:a4361c5a522d2a79d8a9047926b8f8926e0f797777da9f450d359bed9f33ac33"}, + {file = "geventhttpclient-2.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:f430257a7b0a75e7f4c0d6f4f3f8960d45b5aae56b8eca7988963487501a52a0"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a84f48f2eff42171cc446690baffa914122e88cea5b1de44cf6dd1c82b07623b"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a21dba9cf5e7511e76845f62dcf5072f4df7415bb8f20e47e0dfde675943a39"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99feb5581111c7ec44e1ce507b4420947b4c49b363b2fbc3edd543e2ac67a1e0"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bc799d50db685e093b5819459889f356dd7478a82af66f880832a95fcfa37c3"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94a8be54ac74ff6cf4703d049766e6ed07787fa9b6a2dd538c46f81de72ffdde"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71a9e152bb3cb86552f61659f3c7bdc272d9baf21726b3caceb5ab5d0e703fe6"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05a7699b49c9bc478b7ae165809ff97b21811a624791abe3927da5066128a10c"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:598951751b2162b0697cd5b6a9edcc65ec30f34388b6e09caaa0c453fb08fb6e"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4f0c773ceeeedfab56b24b97a0c8f04c58a716dfc7403e51ea898ad01599f1a6"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ee03ea884e6aa318078c0c7132d246fe92b51d587410532e63b864e6e61ea192"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:98a25e30ddccd49f80e037d48f136050b8f3c24ed9c6a69df7a643989f29c4e8"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-win32.whl", hash = "sha256:968587b59372e825411935e188b9a05dcdec6db6769be3eb3bba949cb414ae98"}, + {file = "geventhttpclient-2.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:465e62fb055e2ca5907606d32d421970f93506309b11a33b367eef33d95a6b7a"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ba597da51b59df28cf484326d7d59e33a57d3b32d7a4e1646c580f175354d6ce"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c125a225188bcacd51f05878d6e62554116a5be6b3a203cd0ba2460857bc8cd3"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f016093e8d26b724efdeda776968368fb591a57afbded2d86c408db8723e38ce"}, + {file = 
"geventhttpclient-2.0.11-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a25a7fc768791cf9fe590f1b4f231727441e8f7e9279e8ae2bee83e0f3b010f8"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae71a7740526be78c6e899b03b63ab47a1a434332f7ca725dcdc916d938d46c6"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:06914f401541681d8cb834652f53e65a8179ea17dd0e496fd52712fd3f548fbb"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6ccdebfd20ab07ace7aa4dcd020f094d1cae237b4eacfca08ac523cac64e02d3"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:b2bea1386dbfd262571157da319e2285e20844fdbaabb22f95e784ca8b47d90c"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:f468f88df7649bfcc6f74878182d0b7bcb3c23445a76be2b8b59e46224e2c244"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-win32.whl", hash = "sha256:d75c706f2a2401f703585cddf51cb0e43c28b7f12b1998c4a41fd6d14feec89b"}, + {file = "geventhttpclient-2.0.11-cp36-cp36m-win_amd64.whl", hash = "sha256:27f9e22a31451087854204f7f341bd4adc32050180580f74b5de75b61a3b405f"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:105af48455d4eecb4e0f2b2b7f766131811aa1a9a1e768fb020b9ae0ba840ee4"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb9e9c6f3fb902dd622964097df77e0ed9b249b8904b44fc3461734cc791b0aa"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1b73c37fbecb26475fa6e2d018dab4b5a03c7ba08c8907598605c874a70ee79"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09167de901f5b5273ddc14fd53512cc696495be07f02e3cb8a0335e1ecbff57e"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52ac561df8d363fe2e00ba4cccea470745129a48bb86f665a1447d0d68abec54"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ceb038cbf92105d124433066685c73e6a4a762c15885f00be2e25663468e4f29"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0b70eedf64c162067765ddfb30c8f52daeb875c717a3d25f81d5e411e5ac4367"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e87fb8bd748bf32e9902e9cbea3f20ff5456705d3f53f0a8ea0c4983594457a8"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0ae01d50529ac739573bc9cbc192b71bf9a13c3fcdbf2054952947a25e9f75a3"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-win32.whl", hash = "sha256:beb3a99e7a0a5130fbed2453348d81a78f2ef7d6aa326b5799c7f3dde88cabea"}, + {file = "geventhttpclient-2.0.11-cp37-cp37m-win_amd64.whl", hash = "sha256:63fc49d73e70cab8316a4d0106c037a2a5d0f6650683af05d0d05d354b694d49"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:106e2ba0ce34a3501651995dd46ed38b87e7b5ada0fb977142d952661853f36a"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0edacd51cd9a6f0b88e25cb6c8744488ba6c7c22044b09de585b2a1224f2a7b9"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-macosx_11_0_arm64.whl", 
hash = "sha256:2214352e01fef4218bbbc61bd84af6f101bb5a33244088f6db28ff6d1141797f"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38384af2da776563a19951958df65e31ecc7b8d20788d43aff35ec909e4a115f"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33c4af3aa0312c27668171ea061d461f678848a09a32953b4d895f72a1bde0c9"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d461cdac133d4a4d173e2c1cc213f3a9924e6e092aeebd49bf8924719a073e0b"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ad49019e2828508526d35e7026b95a1fd9ef49ed0cdd2526a5cb3eb39583640"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a59b164a68bbb1a6f7bee859d7e75ef148b1e9bd72c4810c712cd49603dc37cd"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6cc44c57c02db1ded6f5a6bd4ccc385c4d13c7ae3528b831e70b5cc87e5b0ad1"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2d7318b3493c2e21df79429be3dbfefbc254c41a5b5c02c148a4521d59169ad6"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:40df90cd9b5f5f7355526cc538e626466cb60c2e737e9cb8958569377d568e9f"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-win32.whl", hash = "sha256:6f89edc316a8ff967a50c6f98277619786ed6abf2dd36ea905baf840a02b1b1b"}, + {file = "geventhttpclient-2.0.11-cp38-cp38-win_amd64.whl", hash = "sha256:b179a13c113a90c5501f1b1121bdc4c1f816d942280a9c3d2d46aff2bc97269a"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:63826170b520894578bd269b54139bb2f0cc2d96ae1f4a49b3928fe01ffa22ff"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a6fcc3968ea1adf764bc11b0e7d01b94ffe27bdd21c5b1d9e55be56de6a53c3"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c61c02c2d32e1b5b1f73d2b201c1e088e956b73e431ed6b5589010faed88380"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aec646409fa6eee277e33a1f4f1860d4c25e0448eedea149df92918d4502f38c"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b91290138518b201fba98bc82b062ef32e5e3da28843998902852298c354dcf"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b523860ee558f752847b29ad6678d1b8a40154d06bc7a8973132991aff727fdd"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5594bc889a686511039d1efd17473eecc4a91fa01d66a59bfa0a8cf04fb34551"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e573b86999cfeae38c4dd881f05818b9a60245a6763bc77efb48fa136cefdfcc"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a30bd715480ddbab0217764b516a65e36ecee2e81c9a04d074769eec6e0c1681"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:49ff1c00e64e0820a02fadc6a72b49ae8cc69028caa40170873a3012de98d475"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash 
= "sha256:ea232981e29869524e85b5e6c79ad64abf40dd7b6dc01be6765b5e6bd191fd73"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-win32.whl", hash = "sha256:a0b30fef1eb118927b5d8cab106198883f1bde021e9036277ea2f9e0020e0ad2"}, + {file = "geventhttpclient-2.0.11-cp39-cp39-win_amd64.whl", hash = "sha256:844b30e3694a4d9518fe6f0b167fa3ffc3ea3444563d9fdd7a18a961f6a77d9c"}, + {file = "geventhttpclient-2.0.11-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94579ec289d46fca939b78cfe91732e82491f3dab03604f974a2e711654e7210"}, + {file = "geventhttpclient-2.0.11-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955b04deac7ea09a3d5183ba92a3d2a81121ad71d10f1489cb56fd31d0cb4ac4"}, + {file = "geventhttpclient-2.0.11-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7947aae2d7123a970669ebd763a09ef0c85104cda414689dd77b5e5a5c1f2a40"}, + {file = "geventhttpclient-2.0.11-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c483daa1deda0c52a77ed7af2906a38657c15120cb3240bf589dfb139255921"}, + {file = "geventhttpclient-2.0.11-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bc9634e025f17dc25987ebd5b0461659178ca57052ec70ad65052d0495111a74"}, + {file = "geventhttpclient-2.0.11-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9dca243f58f245872458647b0b6da4be9ce8d707639d76a50d2e8d3f4abb1659"}, + {file = "geventhttpclient-2.0.11-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64d36604974bc2b2ed0166bc666cead87f3c0f2d9487ef73d4e11df9ba6ebcc8"}, + {file = "geventhttpclient-2.0.11-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46677a56fa9f2f650be74024601b3a1968cfc58a434f5819fc2fc227bb292836"}, + {file = "geventhttpclient-2.0.11-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:989a1ed8dbdaf683dd5701087b736b93e6bacb3c29f4090014e64033cc8620e2"}, + {file = "geventhttpclient-2.0.11-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:9b406ef64382a9c42b88331cdd6639a2b998e8034dbb1b702264d27c01f3ad5d"}, + {file = "geventhttpclient-2.0.11-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:713530c8f67a08ce0d5a4af80045112213c63eacefa1c08b76beebf780c755b0"}, + {file = "geventhttpclient-2.0.11-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd894ec63974fe4e916a1bf6efd35307b86ef53bd88e8fbe61020a289fee2f7c"}, + {file = "geventhttpclient-2.0.11-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e18e622171d09f068b26304b7d3c484d55952813e09eec5b3db1012dc53795de"}, + {file = "geventhttpclient-2.0.11-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce8421aa0a2307edf04a7086236e7e9f9188ab349154c409d723744032746eb"}, + {file = "geventhttpclient-2.0.11-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:237eba77682553253040588f136a2980dfcd71307202422a17b716e9d8be5614"}, + {file = "geventhttpclient-2.0.11-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:60641b8ff7077a57bb68f1189c8ae8ffc6f14ae238ba6a81748659c30894d580"}, + {file = "geventhttpclient-2.0.11-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5939bca6ab38a482352be8a7141570464d4d18281d8a3a2e2f7a82a0d8c38c4"}, + {file = 
"geventhttpclient-2.0.11-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:025026620e5a369844b576981ddab25d60e7e3bb0e0657c1fe9360a52769eb9d"}, + {file = "geventhttpclient-2.0.11-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b48b10e2a812b9297ad5c43e7a1a088220940060bbfb84fb721b17ab3012e0d"}, + {file = "geventhttpclient-2.0.11-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e572e63e51fde06c30beabf8021e7d3f93e198a9c241ef2f3ed16d7828966768"}, + {file = "geventhttpclient-2.0.11.tar.gz", hash = "sha256:549d0f3af08420b9ad2beeda211153c7605b5ba409b228db7f1b81c8bfbec6b4"}, +] + +[package.dependencies] +brotli = "*" +certifi = "*" +gevent = ">=0.13" +six = "*" + +[[package]] +name = "greenlet" +version = "2.0.2" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = 
"sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +] + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["objgraph", "psutil"] + +[[package]] +name = "gunicorn" +version = "20.1.0" +description = "WSGI HTTP Server for UNIX" +optional = false +python-versions = ">=3.5" +files = [ + {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] + +[package.dependencies] +setuptools = ">=3.0" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "idna" +version = "2.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, + {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "iso8601" +version = "2.1.0" +description = "Simple module to parse ISO 8601 dates" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "iso8601-2.1.0-py3-none-any.whl", hash = "sha256:aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242"}, + {file = "iso8601-2.1.0.tar.gz", hash = "sha256:6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jinja2-cli" +version = "0.8.2" +description = "A CLI interface to Jinja2" +optional = false +python-versions = "*" +files = [ + {file = "jinja2-cli-0.8.2.tar.gz", hash = "sha256:a16bb1454111128e206f568c95938cdef5b5a139929378f72bb8cf6179e18e50"}, + {file = "jinja2_cli-0.8.2-py2.py3-none-any.whl", hash = "sha256:b91715c79496beaddad790171e7258a87db21c1a0b6d2b15bca3ba44b74aac5d"}, +] + +[package.dependencies] +jinja2 = "*" +pyyaml = {version = "*", optional = true, markers = "extra == \"yaml\""} + +[package.extras] +tests = ["flake8", "jinja2", "pytest"] +toml = ["jinja2", "toml"] +xml = ["jinja2", "xmltodict"] +yaml = ["jinja2", "pyyaml"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "kombu" +version = "5.3.4" +description = "Messaging library for Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "kombu-5.3.4-py3-none-any.whl", hash = "sha256:63bb093fc9bb80cfb3a0972336a5cec1fa7ac5f9ef7e8237c6bf8dda9469313e"}, + {file = "kombu-5.3.4.tar.gz", hash = "sha256:0bb2e278644d11dea6272c17974a3dbb9688a949f3bb60aeb5b791329c44fadc"}, +] + +[package.dependencies] +amqp = ">=5.1.1,<6.0.0" +boto3 = {version = ">=1.26.143", optional = true, markers = "extra == \"sqs\""} +pycurl = {version = ">=7.43.0.5", optional = true, markers = "sys_platform != \"win32\" and platform_python_implementation == \"CPython\" and extra == \"sqs\""} +urllib3 = {version = ">=1.26.16", optional = true, markers = "extra == \"sqs\""} +vine = "*" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.10.0)"] +azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] +confluentkafka = ["confluent-kafka (>=2.2.0)"] +consul = ["python-consul2"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +mongodb = ["pymongo (>=4.1.1)"] +msgpack = ["msgpack"] +pyro = ["pyro4"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=2.8.0)"] + +[[package]] +name = "locust" +version = "2.23.1" +description = "Developer friendly load testing framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "locust-2.23.1-py3-none-any.whl", hash = "sha256:96013a460a4b4d6d4fd46c70e6ff1fd2b6e03b48ddb1b48d1513d3134ba2cecf"}, + {file = "locust-2.23.1.tar.gz", hash = "sha256:6cc729729e5ebf5852fc9d845302cfcf0ab0132f198e68b3eb0c88b438b6a863"}, +] + +[package.dependencies] +ConfigArgParse = ">=1.5.5" +flask = ">=2.0.0" +Flask-Cors = ">=3.0.10" +Flask-Login = ">=0.6.3" +gevent = ">=22.10.2" +geventhttpclient = ">=2.0.11" +msgpack = ">=1.0.0" +psutil = ">=5.9.1" +pywin32 = {version = "*", markers = "platform_system == \"Windows\""} +pyzmq = ">=25.0.0" +requests = ">=2.26.0" +roundrobin = ">=0.0.2" +Werkzeug = ">=2.0.0" + +[[package]] +name = "lxml" +version = "4.9.3" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = 
"lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.35)"] + +[[package]] +name = "mako" +version = "1.3.0" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, + {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.0-py3-none-any.whl", hash = "sha256:e7997f83571c7fd476042c2c188e4ee8a78900ca5e74bd9c8097afa56624e9bd"}, + {file = "marshmallow-3.21.0.tar.gz", hash = "sha256:20f53be28c6e374a711a16165fb22a8dc6003e3f7cda1285e3ca777b9193885b"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "marshmallow-sqlalchemy" +version = "0.29.0" +description = "SQLAlchemy integration with the marshmallow (de)serialization library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "marshmallow-sqlalchemy-0.29.0.tar.gz", hash = "sha256:3523a774390ef0c1c0f7c708a7519809c5396cf608720f14f55c36f74ff5bbec"}, + {file = "marshmallow_sqlalchemy-0.29.0-py2.py3-none-any.whl", hash = "sha256:3cee0bf61ed10687c0a41448e1916649b28222334a02f7b937c39d1c69c18bee"}, +] + +[package.dependencies] +marshmallow = ">=3.0.0" +packaging = ">=21.3" +SQLAlchemy = ">=1.4.40,<3.0" + +[package.extras] +dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] +docs = ["alabaster (==0.7.13)", "sphinx (==6.1.3)", "sphinx-issues (==3.0.1)"] +lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)"] +tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mistune" +version = "0.8.4" +description = "The fastest markdown parser in pure Python" +optional = false +python-versions = "*" +files = [ + {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, + {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, +] + +[[package]] +name = "more-itertools" +version = "8.14.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.5" +files = [ + {file = "more-itertools-8.14.0.tar.gz", hash = "sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750"}, + {file = "more_itertools-8.14.0-py3-none-any.whl", hash = "sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2"}, +] + +[[package]] +name = "moto" +version = "4.2.14" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "moto-4.2.14-py2.py3-none-any.whl", hash = "sha256:6d242dbbabe925bb385ddb6958449e5c827670b13b8e153ed63f91dbdb50372c"}, + {file = "moto-4.2.14.tar.gz", hash = "sha256:8f9263ca70b646f091edcc93e97cda864a542e6d16ed04066b1370ed217bd190"}, +] + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.12.201" +cryptography = ">=3.3.1" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.13.0" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML 
(>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +apigatewayv2 = ["PyYAML (>=5.1)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"] +ec2 = ["sshpubkeys (>=3.1.0)"] +glue = ["pyparsing (>=3.0.7)"] +iotdata = ["jsondiff (>=1.1.2)"] +proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.0)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.0)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +ssm = ["PyYAML (>=5.1)"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + +[[package]] +name = "msgpack" +version = "1.0.7" +description = "MessagePack serializer" +optional = false +python-versions = ">=3.8" +files = [ + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"}, + {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"}, + {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"}, + {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"}, + {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"}, + {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"}, + {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"}, + {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"}, + {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"}, 
+ {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"}, + {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"}, + {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"}, + {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, +] + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = 
"multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = 
"multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = 
"multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "mypy" +version = "1.5.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ad3109bec37cc33654de8db30fe8ff3a1bb57ea65144167d68185e6dced9868d"}, + {file = "mypy-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4ea3a0241cb005b0ccdbd318fb99619b21ae51bcf1660b95fc22e0e7d3ba4a1"}, + {file = "mypy-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fe816e26e676c1311b9e04fd576543b873576d39439f7c24c8e5c7728391ecf"}, + {file = "mypy-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42170e68adb1603ccdc55a30068f72bcfcde2ce650188e4c1b2a93018b826735"}, + {file = "mypy-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:d145b81a8214687cfc1f85c03663a5bbe736777410e5580e54d526e7e904f564"}, + {file = "mypy-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c36011320e452eb30bec38b9fd3ba20569dc9545d7d4540d967f3ea1fab9c374"}, + {file = "mypy-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f3940cf5845b2512b3ab95463198b0cdf87975dfd17fdcc6ce9709a9abe09e69"}, + {file = "mypy-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9166186c498170e1ff478a7f540846b2169243feb95bc228d39a67a1a450cdc6"}, + {file = "mypy-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:725b57a19b7408ef66a0fd9db59b5d3e528922250fb56e50bded27fea9ff28f0"}, + {file = "mypy-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:eec5c927aa4b3e8b4781840f1550079969926d0a22ce38075f6cfcf4b13e3eb4"}, + {file = "mypy-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79c520aa24f21852206b5ff2cf746dc13020113aa73fa55af504635a96e62718"}, + {file = "mypy-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:769ddb6bfe55c2bd9c7d6d7020885a5ea14289619db7ee650e06b1ef0852c6f4"}, + {file = 
"mypy-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf18f8db7e5f060d61c91e334d3b96d6bb624ddc9ee8a1cde407b737acbca2c"}, + {file = "mypy-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a2500ad063413bc873ae102cf655bf49889e0763b260a3a7cf544a0cbbf7e70a"}, + {file = "mypy-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:84cf9f7d8a8a22bb6a36444480f4cbf089c917a4179fbf7eea003ea931944a7f"}, + {file = "mypy-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a551ed0fc02455fe2c1fb0145160df8336b90ab80224739627b15ebe2b45e9dc"}, + {file = "mypy-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:372fd97293ed0076d52695849f59acbbb8461c4ab447858cdaeaf734a396d823"}, + {file = "mypy-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8a7444d6fcac7e2585b10abb91ad900a576da7af8f5cffffbff6065d9115813"}, + {file = "mypy-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:35b13335c6c46a386577a51f3d38b2b5d14aa619e9633bb756bd77205e4bd09f"}, + {file = "mypy-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:2c9d570f53908cbea326ad8f96028a673b814d9dca7515bf71d95fa662c3eb6f"}, + {file = "mypy-1.5.0-py3-none-any.whl", hash = "sha256:69b32d0dedd211b80f1b7435644e1ef83033a2af2ac65adcdc87c38db68a86be"}, + {file = "mypy-1.5.0.tar.gz", hash = "sha256:f3460f34b3839b9bc84ee3ed65076eb827cd99ed13ed08d723f9083cada4a212"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nanoid" +version = "2.0.0" +description = "A tiny, secure, URL-friendly, unique string ID generator for Python" +optional = false +python-versions = "*" +files = [ + {file = "nanoid-2.0.0-py3-none-any.whl", hash = "sha256:90aefa650e328cffb0893bbd4c236cfd44c48bc1f2d0b525ecc53c3187b653bb"}, + {file = "nanoid-2.0.0.tar.gz", hash = "sha256:5a80cad5e9c6e9ae3a41fa2fb34ae189f7cb420b2a5d8f82bd9d23466e4efa68"}, +] + +[[package]] +name = "networkx" +version = "2.8.8" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.8" +files = [ + {file = "networkx-2.8.8-py3-none-any.whl", hash = "sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, + {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, +] + +[package.extras] +default = ["matplotlib (>=3.4)", "numpy (>=1.19)", "pandas (>=1.3)", "scipy (>=1.8)"] +developer = ["mypy (>=0.982)", "pre-commit (>=2.20)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.2)", "pydata-sphinx-theme (>=0.11)", "sphinx (>=5.2)", "sphinx-gallery (>=0.11)", "texext (>=0.6.6)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.9)", "sympy (>=1.10)"] +test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] + +[[package]] +name = "newrelic" +version = "8.10.0" +description = "New Relic Python Agent" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "newrelic-8.10.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:cf3b67327e64d2b50aec855821199b2bc46bc0c2d142df269d420748dd49b31b"}, + {file = "newrelic-8.10.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9601d886669fe1e0c23bbf91fb68ab23086011816ba96c6dd714c60dc0a74088"}, + {file = "newrelic-8.10.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:55a64d2abadf69bbc7bb01178332c4f25247689a97b01a62125d162ea7ec8974"}, + {file = "newrelic-8.10.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:b6cddd869ac8f7f32f6de8212ae878a21c9e63f2183601d239a76d38c5d5a366"}, + {file = "newrelic-8.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9af0130e1f1ca032c606d15a6d5558d27273a063b7c53702218b3beccd50b23"}, + {file = "newrelic-8.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2fd24b32dbf510e4e3fe40b71ad395dd73a4bb9f5eaf59eb5ff22ed76ba2d41"}, + {file = "newrelic-8.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2567ba9e29fd7b9f4c23cf16a5a149097eb0e5da587734c5a40732d75aaec189"}, + {file = "newrelic-8.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9c9f7842234a51e4a2fdafe42c42ebe0b6b1966279f2f91ec8a9c16480c2236"}, + {file = "newrelic-8.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:365d3b1a10d1021217beeb28a93c1356a9feb94bd24f02972691dc71227e40dc"}, + {file = "newrelic-8.10.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ecd0666557419dbe11b04e3b38480b3113b3c4670d42619420d60352a1956dd8"}, + {file = "newrelic-8.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722072d57e2d416de68b650235878583a2a8809ea39c7dd5c8c11a19089b7665"}, + {file = "newrelic-8.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbda843100c99ac3291701c0a70fedb705c0b0707800c60b93657d3985aae357"}, + {file = "newrelic-8.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ed36fb91f152128825459eae9a52da364352ea95bcd78b405b0a5b8057b2ed7"}, + {file = "newrelic-8.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc975c29548e25805ead794d9de7ab3cb8ba4a6a106098646e1ab03112d1432e"}, + {file = "newrelic-8.10.0.tar.gz", hash = "sha256:8a2271b76ea684a63936302579d6085d46a2b54042cb91dc9b0d71a0cd4dd38b"}, +] + +[package.extras] +infinite-tracing = ["grpcio", "protobuf"] + +[[package]] +name = "notifications-python-client" +version = "6.4.1" +description = "Python API client for GOV.UK Notify." +optional = false +python-versions = ">=3.6" +files = [ + {file = "notifications_python_client-6.4.1-py2.py3-none-any.whl", hash = "sha256:10b51f47aaebce7eaf0838f06def8660859ce8321bd0b180934e26895dae08df"}, +] + +[package.dependencies] +docopt = ">=0.3.0" +PyJWT = ">=1.5.1" +requests = ">=2.0.0" + +[[package]] +name = "notifications-utils" +version = "52.2.7" +description = "Shared python code for Notification - Provides logging utils etc." +optional = false +python-versions = "~3.10.9" +files = [] +develop = false + +[package.dependencies] +awscli = "1.33.5" +bleach = "6.1.0" +boto3 = "1.34.100" +cachetools = "4.2.4" +certifi = "^2023.7.22" +cryptography = "^42.0.3" +Flask = "2.3.3" +Flask-Redis = "0.4.0" +itsdangerous = "2.2.0" +Jinja2 = "^3.0.0" +markupsafe = "2.1.5" +mistune = "0.8.4" +ordered-set = "4.1.0" +phonenumbers = "8.13.36" +py_w3c = "0.3.1" +pypdf2 = "1.28.6" +python-json-logger = "2.0.7" +pytz = "2021.3" +PyYAML = "6.0.1" +requests = "2.31.0" +smartypants = "2.0.1" +statsd = "3.3.0" +werkzeug = "3.0.3" + +[package.source] +type = "git" +url = "https://github.com/cds-snc/notifier-utils.git" +reference = "52.2.7" +resolved_reference = "cd8943a30aa75f657951716111ff68df737b0fff" + +[[package]] +name = "ordered-set" +version = "4.1.0" +description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, + {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, +] + +[package.extras] +dev = ["black", "mypy", "pytest"] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "phonenumbers" +version = "8.13.36" +description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." +optional = false +python-versions = "*" +files = [ + {file = "phonenumbers-8.13.36-py2.py3-none-any.whl", hash = "sha256:68e06d20ae2f8fe5c7c7fd5b433f4257bc3cc747dc5196a029c7898ea449b012"}, + {file = "phonenumbers-8.13.36.tar.gz", hash = "sha256:b4e2371e35a1172aa2c91c9200b1e48e87b9355eb575768dd38058fc8d72c9ff"}, +] + +[[package]] +name = "platformdirs" +version = "4.0.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, + {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.41" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.41-py3-none-any.whl", hash = "sha256:f36fe301fafb7470e86aaf90f036eef600a3210be4decf461a5b1ca8403d3cb2"}, + {file = "prompt_toolkit-3.0.41.tar.gz", hash = "sha256:941367d97fc815548822aa26c2a269fdc4eb21e9ec05fc5d447cf09bad5d75f0"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "5.9.6" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, + {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, + {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, + {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, + {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, + {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, + {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, + {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, + {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, + {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "psycopg2-binary" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, + {file = 
"psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, + {file = 
"psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, + {file = 
"psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = 
"sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, +] + +[[package]] +name = "pwnedpasswords" +version = "2.0.0" +description = "A Python wrapper for Troy Hunt's Pwned Passwords API." +optional = false +python-versions = "*" +files = [ + {file = "pwnedpasswords-2.0.0-py2.py3-none-any.whl", hash = "sha256:8323cf3802d5021b1f212d5f441831057a80f33bf933eb0889020c7b698e96e6"}, +] + +[package.dependencies] +future = "*" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "py-w3c" +version = "0.3.1" +description = "W3C services for python." +optional = false +python-versions = "*" +files = [ + {file = "py_w3c-0.3.1.tar.gz", hash = "sha256:b643360fb2bf8fe87d7dc566c426a3417bfc09616b79966092fa0ecfabe92e83"}, +] + +[[package]] +name = "pyasn1" +version = "0.5.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, +] + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pycurl" +version = "7.45.2" +description = "PycURL -- A Python Interface To The cURL library" +optional = false +python-versions = ">=3.5" +files = [ + {file = "pycurl-7.45.2.tar.gz", hash = "sha256:5730590be0271364a5bddd9e245c9cc0fb710c4cbacbdd95264a3122d23224ca"}, +] + +[[package]] +name = "pydantic" +version = "2.5.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.14.5" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.5" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, + {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, + {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = 
"pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" 
+files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pypdf2" +version = "1.28.6" +description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" +optional = false +python-versions = ">=2.7" +files = [ + {file = "PyPDF2-1.28.6-py3-none-any.whl", hash = "sha256:d7118f0187153257b1f906dcfcd8236608f4987b6a9999b7c5ad49114706a1ad"}, + {file = "PyPDF2-1.28.6.tar.gz", hash = "sha256:c0840835d18357b077da05bdad1423f5e29419f318135b6a6542895930dc4905"}, +] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = 
"pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = 
">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "3.0.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-env" +version = "0.8.2" +description = "py.test plugin that allows you to add environment variables." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest_env-0.8.2-py3-none-any.whl", hash = "sha256:5e533273f4d9e6a41c3a3120e0c7944aae5674fa773b329f00a5eb1f23c53a38"}, + {file = "pytest_env-0.8.2.tar.gz", hash = "sha256:baed9b3b6bae77bd75b9238e0ed1ee6903a42806ae9d6aeffb8754cd5584d4ff"}, +] + +[package.dependencies] +pytest = ">=7.3.1" + +[package.extras] +test = ["coverage (>=7.2.7)", "pytest-mock (>=3.10)"] + +[[package]] +name = "pytest-forked" +version = "1.6.0" +description = "run tests in isolated forked subprocesses" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-forked-1.6.0.tar.gz", hash = "sha256:4dafd46a9a600f65d822b8f605133ecf5b3e1941ebb3588e943b4e3eb71a5a3f"}, + {file = "pytest_forked-1.6.0-py3-none-any.whl", hash = "sha256:810958f66a91afb1a1e2ae83089d8dc1cd2437ac96b12963042fbb9fb4d16af0"}, +] + +[package.dependencies] +py = "*" +pytest = ">=3.10" + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-mock-resources" +version = "2.10.0" +description = "A pytest plugin for easily instantiating reproducible mock resources." 
+optional = false +python-versions = ">=3.7,<4" +files = [ + {file = "pytest_mock_resources-2.10.0-py3-none-any.whl", hash = "sha256:f67eccc92d645328cc4bf532c9b08ff837b0f60a878d230c4ae4efc17f73a160"}, + {file = "pytest_mock_resources-2.10.0.tar.gz", hash = "sha256:acea4edd98de70d56e0949d8e5ab2f75d412b3a0f61437252284146f931f4e09"}, +] + +[package.dependencies] +filelock = {version = "*", optional = true, markers = "extra == \"docker\" or extra == \"postgres\" or extra == \"postgres-binary\" or extra == \"postgres-async\" or extra == \"redshift\" or extra == \"mongo\" or extra == \"moto\" or extra == \"redis\" or extra == \"mysql\""} +pytest = ">=1.0" +python-on-whales = {version = ">=0.22.0", optional = true, markers = "extra == \"docker\" or extra == \"postgres\" or extra == \"postgres-binary\" or extra == \"postgres-async\" or extra == \"redshift\" or extra == \"mongo\" or extra == \"moto\" or extra == \"redis\" or extra == \"mysql\""} +redis = {version = "*", optional = true, markers = "extra == \"redis\""} +sqlalchemy = ">1.0,<1.4.0 || >1.4.0,<1.4.1 || >1.4.1,<1.4.2 || >1.4.2,<1.4.3 || >1.4.3,<1.4.4 || >1.4.4,<1.4.5 || >1.4.5,<1.4.6 || >1.4.6,<1.4.7 || >1.4.7,<1.4.8 || >1.4.8,<1.4.9 || >1.4.9,<1.4.10 || >1.4.10,<1.4.11 || >1.4.11,<1.4.12 || >1.4.12,<1.4.13 || >1.4.13,<1.4.14 || >1.4.14,<1.4.15 || >1.4.15,<1.4.16 || >1.4.16,<1.4.17 || >1.4.17,<1.4.18 || >1.4.18,<1.4.19 || >1.4.19,<1.4.20 || >1.4.20,<1.4.21 || >1.4.21,<1.4.22 || >1.4.22,<1.4.23 || >1.4.23" +typing_extensions = "*" + +[package.extras] +docker = ["filelock", "python-on-whales (>=0.22.0)"] +mongo = ["filelock", "pymongo", "python-on-whales (>=0.22.0)"] +moto = ["boto3", "filelock", "python-on-whales (>=0.22.0)"] +mysql = ["filelock", "pymysql (>=1.0)", "python-on-whales (>=0.22.0)"] +postgres = ["filelock", "psycopg2", "python-on-whales (>=0.22.0)"] +postgres-async = ["asyncpg", "filelock", "python-on-whales (>=0.22.0)"] +postgres-binary = ["filelock", "psycopg2-binary", "python-on-whales (>=0.22.0)"] +redis = ["filelock", "python-on-whales (>=0.22.0)", "redis"] +redshift = ["boto3", "filelock", "moto", "python-on-whales (>=0.22.0)", "sqlparse"] + +[[package]] +name = "pytest-xdist" +version = "2.5.0" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, + {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, +] + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" +pytest-forked = "*" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + +[[package]] +name = "python-magic" +version = "0.4.27" +description = "File type identification using libmagic" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, + {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, +] + +[[package]] +name = "python-on-whales" +version = "0.67.0" +description = "A Docker client for Python, designed to be fun and intuitive!" +optional = false +python-versions = "<4,>=3.8" +files = [ + {file = "python-on-whales-0.67.0.tar.gz", hash = "sha256:4f0e62fbbee31adf2ec255939b0952df9cef83e7b4c0e9961a2f2045682bc15e"}, + {file = "python_on_whales-0.67.0-py3-none-any.whl", hash = "sha256:7781cfe856e3cf44f94839210e8fa4e228595834f61a70577dad0b76b4915c90"}, +] + +[package.dependencies] +pydantic = ">=1.9,<2.0.dev0 || >=2.1.dev0,<3" +requests = "*" +tqdm = "*" +typer = ">=0.4.1" +typing-extensions = "*" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "pytz" +version = "2021.3" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, +] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = 
"sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, 
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "pyzmq" +version = "25.1.1" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, + {file = "pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, + {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, + {file = 
"pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, + {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = "sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, + {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, + {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, + {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, + {file = "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, + {file = "pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, + {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, + {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = "sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, + {file = 
"pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, + {file = "pyzmq-25.1.1.tar.gz", hash = "sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "redis" +version = "5.0.1" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.7" +files = [ + {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, + {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-file" +version = "1.5.1" +description = "File transport adapter for Requests" +optional = false +python-versions = "*" +files = [ + {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, + {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, +] + +[package.dependencies] +requests = ">=1.0.0" +six = "*" + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "responses" +version = "0.24.1" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "responses-0.24.1-py3-none-any.whl", hash = "sha256:a2b43f4c08bfb9c9bd242568328c65a34b318741d3fab884ac843c5ceeb543f9"}, + {file = "responses-0.24.1.tar.gz", hash = "sha256:b127c6ca3f8df0eb9cc82fd93109a3007a86acb24871834c47b77765152ecf8c"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] + +[[package]] +name = "rfc3987" +version = "1.3.8" +description = "Parsing and validation of URIs (RFC 3986) and IRIs (RFC 3987)" +optional = false +python-versions = "*" +files = [ + {file = "rfc3987-1.3.8-py2.py3-none-any.whl", hash = "sha256:10702b1e51e5658843460b189b185c0366d2cf4cff716f13111b0ea9fd2dce53"}, + {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, +] + +[[package]] +name = "roundrobin" +version = "0.0.4" +description = "Collection of roundrobin utilities" +optional = false +python-versions = "*" +files = [ + {file = "roundrobin-0.0.4.tar.gz", hash = "sha256:7e9d19a5bd6123d99993fb935fa86d25c88bb2096e493885f61737ed0f5e9abd"}, +] + +[[package]] +name = "rsa" +version = "4.7.2" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.5, <4" +files = [ + {file = "rsa-4.7.2-py3-none-any.whl", hash = "sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2"}, + {file = "rsa-4.7.2.tar.gz", hash = "sha256:9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "s3transfer" +version = "0.10.0" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"}, + {file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "setuptools" +version = "69.0.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = 
["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "simple-salesforce" +version = "1.12.6" +description = "A basic Salesforce.com REST API client." +optional = false +python-versions = "*" +files = [ + {file = "simple-salesforce-1.12.6.tar.gz", hash = "sha256:77590606c781905f6b75430562951dd2b062438da7f55fca2b61e4cde31df15b"}, + {file = "simple_salesforce-1.12.6-py2.py3-none-any.whl", hash = "sha256:66c74bee88d09ace46e4fc9c2f6b47c0d012817a764f70a5455d6dc2c7ed635c"}, +] + +[package.dependencies] +more-itertools = "*" +pyjwt = {version = "*", extras = ["crypto"]} +requests = ">=2.22.0" +typing-extensions = "*" +zeep = "*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smartypants" +version = "2.0.1" +description = "Python with the SmartyPants" +optional = false +python-versions = "*" +files = [ + {file = "smartypants-2.0.1-py2.py3-none-any.whl", hash = "sha256:8db97f7cbdf08d15b158a86037cd9e116b4cf37703d24e0419a0d64ca5808f0d"}, +] + +[[package]] +name = "sqlalchemy" +version = "1.4.52" +description = "Database Abstraction Library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, + {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, + {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, + {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, + {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = 
"sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, + {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, + {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, + {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, + {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql-pymssql = 
["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlalchemy-stubs" +version = "0.4" +description = "SQLAlchemy stubs and mypy plugin" +optional = false +python-versions = "*" +files = [ + {file = "sqlalchemy-stubs-0.4.tar.gz", hash = "sha256:c665d6dd4482ef642f01027fa06c3d5e91befabb219dc71fc2a09e7d7695f7ae"}, + {file = "sqlalchemy_stubs-0.4-py3-none-any.whl", hash = "sha256:5eec7aa110adf9b957b631799a72fef396b23ff99fe296df726645d01e312aa5"}, +] + +[package.dependencies] +mypy = ">=0.790" +typing-extensions = ">=3.7.4" + +[[package]] +name = "sqlalchemy2-stubs" +version = "0.0.2a38" +description = "Typing Stubs for SQLAlchemy 1.4" +optional = false +python-versions = ">=3.6" +files = [ + {file = "sqlalchemy2-stubs-0.0.2a38.tar.gz", hash = "sha256:861d722abeb12f13eacd775a9f09379b11a5a9076f469ccd4099961b95800f9e"}, + {file = "sqlalchemy2_stubs-0.0.2a38-py3-none-any.whl", hash = "sha256:b62aa46943807287550e2033dafe07564b33b6a815fbaa3c144e396f9cc53bcb"}, +] + +[package.dependencies] +typing-extensions = ">=3.7.4" + +[[package]] +name = "statsd" +version = "3.3.0" +description = "A simple statsd client." +optional = false +python-versions = "*" +files = [ + {file = "statsd-3.3.0-py2.py3-none-any.whl", hash = "sha256:c610fb80347fca0ef62666d241bce64184bd7cc1efe582f9690e045c25535eaa"}, + {file = "statsd-3.3.0.tar.gz", hash = "sha256:e3e6db4c246f7c59003e51c9720a51a7f39a396541cb9b147ff4b14d15b5dd1f"}, +] + +[[package]] +name = "strict-rfc3339" +version = "0.7" +description = "Strict, simple, lightweight RFC3339 functions" +optional = false +python-versions = "*" +files = [ + {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, +] + +[[package]] +name = "tldextract" +version = "3.5.0" +description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "tldextract-3.5.0-py3-none-any.whl", hash = "sha256:2cb271ca8d06ea1630a1361b58edad14e0cf81f34ce3c90b052854528fe2a281"}, + {file = "tldextract-3.5.0.tar.gz", hash = "sha256:4df1c65b95be61d59428e8611e955e54e6f1d4483d3e8d5733d3a9062155e910"}, +] + +[package.dependencies] +filelock = ">=3.0.8" +idna = "*" +requests = ">=2.1.0" +requests-file = ">=1.4" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typer" +version = "0.9.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, + {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, +] + +[package.dependencies] +click = ">=7.1.1,<9.0.0" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "types-boto" +version = "2.49.18.9" +description = "Typing stubs for boto" +optional = false +python-versions = "*" +files = [ + {file = "types-boto-2.49.18.9.tar.gz", hash = "sha256:fe711d938c237be50346a1bdc2231d3170453fe734789075dd088458e4e9442d"}, + {file = "types_boto-2.49.18.9-py3-none-any.whl", hash = "sha256:b44e8aead5e34bc336a813af90fdbb9ac5bb1091de839042628163463d9948eb"}, +] + +[[package]] +name = "types-mock" +version = "4.0.15.2" +description = "Typing stubs for mock" +optional = false +python-versions = "*" +files = [ + {file = "types-mock-4.0.15.2.tar.gz", hash = "sha256:83fe479741adb92210c3c92f006fe058297d5051e93c2cec36f1a9e0bae16e9e"}, + {file = "types_mock-4.0.15.2-py3-none-any.whl", hash = "sha256:39d489b6d9361b75448677680a3087701c0cfab61260363cfc0f646d2bf0a8b2"}, +] + +[[package]] +name = "types-pyopenssl" +version = "23.3.0.0" +description = "Typing stubs for pyOpenSSL" +optional = false +python-versions = ">=3.7" +files = [ + {file = "types-pyOpenSSL-23.3.0.0.tar.gz", hash = "sha256:5ffb077fe70b699c88d5caab999ae80e192fe28bf6cda7989b7e79b1e4e2dcd3"}, + {file = "types_pyOpenSSL-23.3.0.0-py3-none-any.whl", hash = "sha256:00171433653265843b7469ddb9f3c86d698668064cc33ef10537822156130ebf"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" + +[[package]] +name = "types-python-dateutil" +version = "2.8.19.20240106" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, + {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, +] + +[[package]] +name = "types-pytz" +version = "2022.7.1.2" +description = "Typing stubs for pytz" +optional = false +python-versions = "*" +files = [ + {file = "types-pytz-2022.7.1.2.tar.gz", hash = "sha256:487d3e8e9f4071eec8081746d53fa982bbc05812e719dcbf2ebf3d55a1a4cd28"}, + {file = "types_pytz-2022.7.1.2-py3-none-any.whl", hash = "sha256:40ca448a928d566f7d44ddfde0066e384f7ffbd4da2778e42a4570eaca572446"}, +] + +[[package]] +name = "types-redis" +version = "4.6.0.20240425" +description = "Typing stubs for redis" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-redis-4.6.0.20240425.tar.gz", hash = "sha256:9402a10ee931d241fdfcc04592ebf7a661d7bb92a8dea631279f0d8acbcf3a22"}, + {file = 
"types_redis-4.6.0.20240425-py3-none-any.whl", hash = "sha256:ac5bc19e8f5997b9e76ad5d9cf15d0392d9f28cf5fc7746ea4a64b989c45c6a8"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" +types-pyOpenSSL = "*" + +[[package]] +name = "types-requests" +version = "2.31.0.20240406" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, + {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + +[[package]] +name = "unidecode" +version = "1.3.8" +description = "ASCII transliterations of Unicode text" +optional = false +python-versions = ">=3.5" +files = [ + {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"}, + {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"}, +] + +[[package]] +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "vine" +version = "5.1.0" +description = "Python promises." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, + {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, +] + +[[package]] +name = "wcwidth" +version = "0.2.12" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, + {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "werkzeug" +version = "3.0.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] + +[[package]] +name = "yarl" +version = "1.9.3" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32435d134414e01d937cd9d6cc56e8413a8d4741dea36af5840c7750f04d16ab"}, + {file = "yarl-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9a5211de242754b5e612557bca701f39f8b1a9408dff73c6db623f22d20f470e"}, + {file = "yarl-1.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:525cd69eff44833b01f8ef39aa33a9cc53a99ff7f9d76a6ef6a9fb758f54d0ff"}, + {file = "yarl-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc94441bcf9cb8c59f51f23193316afefbf3ff858460cb47b5758bf66a14d130"}, + {file = "yarl-1.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e36021db54b8a0475805acc1d6c4bca5d9f52c3825ad29ae2d398a9d530ddb88"}, + {file = "yarl-1.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0f17d1df951336a02afc8270c03c0c6e60d1f9996fcbd43a4ce6be81de0bd9d"}, + {file = "yarl-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5f3faeb8100a43adf3e7925d556801d14b5816a0ac9e75e22948e787feec642"}, + {file = "yarl-1.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aed37db837ecb5962469fad448aaae0f0ee94ffce2062cf2eb9aed13328b5196"}, + {file = "yarl-1.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:721ee3fc292f0d069a04016ef2c3a25595d48c5b8ddc6029be46f6158d129c92"}, + {file = "yarl-1.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b8bc5b87a65a4e64bc83385c05145ea901b613d0d3a434d434b55511b6ab0067"}, + {file = "yarl-1.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:dd952b9c64f3b21aedd09b8fe958e4931864dba69926d8a90c90d36ac4e28c9a"}, + {file = "yarl-1.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:c405d482c320a88ab53dcbd98d6d6f32ada074f2d965d6e9bf2d823158fa97de"}, + {file = "yarl-1.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9df9a0d4c5624790a0dea2e02e3b1b3c69aed14bcb8650e19606d9df3719e87d"}, + {file = "yarl-1.9.3-cp310-cp310-win32.whl", hash = "sha256:d34c4f80956227f2686ddea5b3585e109c2733e2d4ef12eb1b8b4e84f09a2ab6"}, + {file = "yarl-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:cf7a4e8de7f1092829caef66fd90eaf3710bc5efd322a816d5677b7664893c93"}, + {file = "yarl-1.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d61a0ca95503867d4d627517bcfdc28a8468c3f1b0b06c626f30dd759d3999fd"}, + {file = "yarl-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73cc83f918b69110813a7d95024266072d987b903a623ecae673d1e71579d566"}, + {file = "yarl-1.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:d81657b23e0edb84b37167e98aefb04ae16cbc5352770057893bd222cdc6e45f"}, + {file = "yarl-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a1a8443091c7fbc17b84a0d9f38de34b8423b459fb853e6c8cdfab0eacf613"}, + {file = "yarl-1.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe34befb8c765b8ce562f0200afda3578f8abb159c76de3ab354c80b72244c41"}, + {file = "yarl-1.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c757f64afe53a422e45e3e399e1e3cf82b7a2f244796ce80d8ca53e16a49b9f"}, + {file = "yarl-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a57b41a0920b9a220125081c1e191b88a4cdec13bf9d0649e382a822705c65"}, + {file = "yarl-1.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:632c7aeb99df718765adf58eacb9acb9cbc555e075da849c1378ef4d18bf536a"}, + {file = "yarl-1.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b0b8c06afcf2bac5a50b37f64efbde978b7f9dc88842ce9729c020dc71fae4ce"}, + {file = "yarl-1.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1d93461e2cf76c4796355494f15ffcb50a3c198cc2d601ad8d6a96219a10c363"}, + {file = "yarl-1.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4003f380dac50328c85e85416aca6985536812c082387255c35292cb4b41707e"}, + {file = "yarl-1.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4d6d74a97e898c1c2df80339aa423234ad9ea2052f66366cef1e80448798c13d"}, + {file = "yarl-1.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b61e64b06c3640feab73fa4ff9cb64bd8182de52e5dc13038e01cfe674ebc321"}, + {file = "yarl-1.9.3-cp311-cp311-win32.whl", hash = "sha256:29beac86f33d6c7ab1d79bd0213aa7aed2d2f555386856bb3056d5fdd9dab279"}, + {file = "yarl-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:f7271d6bd8838c49ba8ae647fc06469137e1c161a7ef97d778b72904d9b68696"}, + {file = "yarl-1.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:dd318e6b75ca80bff0b22b302f83a8ee41c62b8ac662ddb49f67ec97e799885d"}, + {file = "yarl-1.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4b1efb11a8acd13246ffb0bee888dd0e8eb057f8bf30112e3e21e421eb82d4a"}, + {file = "yarl-1.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c6f034386e5550b5dc8ded90b5e2ff7db21f0f5c7de37b6efc5dac046eb19c10"}, + {file = "yarl-1.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd49a908cb6d387fc26acee8b7d9fcc9bbf8e1aca890c0b2fdfd706057546080"}, + {file = "yarl-1.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa4643635f26052401750bd54db911b6342eb1a9ac3e74f0f8b58a25d61dfe41"}, + {file = "yarl-1.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e741bd48e6a417bdfbae02e088f60018286d6c141639359fb8df017a3b69415a"}, + {file = "yarl-1.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c86d0d0919952d05df880a1889a4f0aeb6868e98961c090e335671dea5c0361"}, + {file = "yarl-1.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d5434b34100b504aabae75f0622ebb85defffe7b64ad8f52b8b30ec6ef6e4b9"}, + {file = "yarl-1.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79e1df60f7c2b148722fb6cafebffe1acd95fd8b5fd77795f56247edaf326752"}, + {file = "yarl-1.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:44e91a669c43f03964f672c5a234ae0d7a4d49c9b85d1baa93dec28afa28ffbd"}, + {file = 
"yarl-1.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3cfa4dbe17b2e6fca1414e9c3bcc216f6930cb18ea7646e7d0d52792ac196808"}, + {file = "yarl-1.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:88d2c3cc4b2f46d1ba73d81c51ec0e486f59cc51165ea4f789677f91a303a9a7"}, + {file = "yarl-1.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cccdc02e46d2bd7cb5f38f8cc3d9db0d24951abd082b2f242c9e9f59c0ab2af3"}, + {file = "yarl-1.9.3-cp312-cp312-win32.whl", hash = "sha256:96758e56dceb8a70f8a5cff1e452daaeff07d1cc9f11e9b0c951330f0a2396a7"}, + {file = "yarl-1.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:c4472fe53ebf541113e533971bd8c32728debc4c6d8cc177f2bff31d011ec17e"}, + {file = "yarl-1.9.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:126638ab961633f0940a06e1c9d59919003ef212a15869708dcb7305f91a6732"}, + {file = "yarl-1.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c99ddaddb2fbe04953b84d1651149a0d85214780e4d0ee824e610ab549d98d92"}, + {file = "yarl-1.9.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dab30b21bd6fb17c3f4684868c7e6a9e8468078db00f599fb1c14e324b10fca"}, + {file = "yarl-1.9.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:828235a2a169160ee73a2fcfb8a000709edf09d7511fccf203465c3d5acc59e4"}, + {file = "yarl-1.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc391e3941045fd0987c77484b2799adffd08e4b6735c4ee5f054366a2e1551d"}, + {file = "yarl-1.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51382c72dd5377861b573bd55dcf680df54cea84147c8648b15ac507fbef984d"}, + {file = "yarl-1.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:28a108cb92ce6cf867690a962372996ca332d8cda0210c5ad487fe996e76b8bb"}, + {file = "yarl-1.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8f18a7832ff85dfcd77871fe677b169b1bc60c021978c90c3bb14f727596e0ae"}, + {file = "yarl-1.9.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:7eaf13af79950142ab2bbb8362f8d8d935be9aaf8df1df89c86c3231e4ff238a"}, + {file = "yarl-1.9.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:66a6dbf6ca7d2db03cc61cafe1ee6be838ce0fbc97781881a22a58a7c5efef42"}, + {file = "yarl-1.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a0a4f3aaa18580038cfa52a7183c8ffbbe7d727fe581300817efc1e96d1b0e9"}, + {file = "yarl-1.9.3-cp37-cp37m-win32.whl", hash = "sha256:946db4511b2d815979d733ac6a961f47e20a29c297be0d55b6d4b77ee4b298f6"}, + {file = "yarl-1.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2dad8166d41ebd1f76ce107cf6a31e39801aee3844a54a90af23278b072f1ccf"}, + {file = "yarl-1.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bb72d2a94481e7dc7a0c522673db288f31849800d6ce2435317376a345728225"}, + {file = "yarl-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9a172c3d5447b7da1680a1a2d6ecdf6f87a319d21d52729f45ec938a7006d5d8"}, + {file = "yarl-1.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2dc72e891672343b99db6d497024bf8b985537ad6c393359dc5227ef653b2f17"}, + {file = "yarl-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8d51817cf4b8d545963ec65ff06c1b92e5765aa98831678d0e2240b6e9fd281"}, + {file = "yarl-1.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53ec65f7eee8655bebb1f6f1607760d123c3c115a324b443df4f916383482a67"}, + {file = "yarl-1.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cfd77e8e5cafba3fb584e0f4b935a59216f352b73d4987be3af51f43a862c403"}, + {file = "yarl-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e73db54c967eb75037c178a54445c5a4e7461b5203b27c45ef656a81787c0c1b"}, + {file = "yarl-1.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09c19e5f4404574fcfb736efecf75844ffe8610606f3fccc35a1515b8b6712c4"}, + {file = "yarl-1.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6280353940f7e5e2efaaabd686193e61351e966cc02f401761c4d87f48c89ea4"}, + {file = "yarl-1.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c25ec06e4241e162f5d1f57c370f4078797ade95c9208bd0c60f484834f09c96"}, + {file = "yarl-1.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7217234b10c64b52cc39a8d82550342ae2e45be34f5bff02b890b8c452eb48d7"}, + {file = "yarl-1.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4ce77d289f8d40905c054b63f29851ecbfd026ef4ba5c371a158cfe6f623663e"}, + {file = "yarl-1.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5f74b015c99a5eac5ae589de27a1201418a5d9d460e89ccb3366015c6153e60a"}, + {file = "yarl-1.9.3-cp38-cp38-win32.whl", hash = "sha256:8a2538806be846ea25e90c28786136932ec385c7ff3bc1148e45125984783dc6"}, + {file = "yarl-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:6465d36381af057d0fab4e0f24ef0e80ba61f03fe43e6eeccbe0056e74aadc70"}, + {file = "yarl-1.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2f3c8822bc8fb4a347a192dd6a28a25d7f0ea3262e826d7d4ef9cc99cd06d07e"}, + {file = "yarl-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7831566595fe88ba17ea80e4b61c0eb599f84c85acaa14bf04dd90319a45b90"}, + {file = "yarl-1.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff34cb09a332832d1cf38acd0f604c068665192c6107a439a92abfd8acf90fe2"}, + {file = "yarl-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe8080b4f25dfc44a86bedd14bc4f9d469dfc6456e6f3c5d9077e81a5fedfba7"}, + {file = "yarl-1.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8535e111a064f3bdd94c0ed443105934d6f005adad68dd13ce50a488a0ad1bf3"}, + {file = "yarl-1.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d155a092bf0ebf4a9f6f3b7a650dc5d9a5bbb585ef83a52ed36ba46f55cc39d"}, + {file = "yarl-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:778df71c8d0c8c9f1b378624b26431ca80041660d7be7c3f724b2c7a6e65d0d6"}, + {file = "yarl-1.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9f9cafaf031c34d95c1528c16b2fa07b710e6056b3c4e2e34e9317072da5d1a"}, + {file = "yarl-1.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ca6b66f69e30f6e180d52f14d91ac854b8119553b524e0e28d5291a724f0f423"}, + {file = "yarl-1.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0e7e83f31e23c5d00ff618045ddc5e916f9e613d33c5a5823bc0b0a0feb522f"}, + {file = "yarl-1.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:af52725c7c39b0ee655befbbab5b9a1b209e01bb39128dce0db226a10014aacc"}, + {file = "yarl-1.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0ab5baaea8450f4a3e241ef17e3d129b2143e38a685036b075976b9c415ea3eb"}, + {file = "yarl-1.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6d350388ba1129bc867c6af1cd17da2b197dff0d2801036d2d7d83c2d771a682"}, + {file = "yarl-1.9.3-cp39-cp39-win32.whl", hash = "sha256:e2a16ef5fa2382af83bef4a18c1b3bcb4284c4732906aa69422cf09df9c59f1f"}, + {file = 
"yarl-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:d92d897cb4b4bf915fbeb5e604c7911021a8456f0964f3b8ebbe7f9188b9eabb"}, + {file = "yarl-1.9.3-py3-none-any.whl", hash = "sha256:271d63396460b6607b588555ea27a1a02b717ca2e3f2cf53bdde4013d7790929"}, + {file = "yarl-1.9.3.tar.gz", hash = "sha256:4a14907b597ec55740f63e52d7fee0e9ee09d5b9d57a4f399a7423268e457b57"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zeep" +version = "4.2.1" +description = "A Python SOAP client" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zeep-4.2.1-py3-none-any.whl", hash = "sha256:6754feb4c34a4b6d65fbc359252bf6654dcce3937bf1d95aae4402a60a8f5939"}, + {file = "zeep-4.2.1.tar.gz", hash = "sha256:72093acfdb1d8360ed400869b73fbf1882b95c4287f798084c42ee0c1ff0e425"}, +] + +[package.dependencies] +attrs = ">=17.2.0" +isodate = ">=0.5.4" +lxml = ">=4.6.0" +platformdirs = ">=1.4.0" +pytz = "*" +requests = ">=2.7.0" +requests-file = ">=1.5.1" +requests-toolbelt = ">=0.7.1" + +[package.extras] +async = ["httpx (>=0.15.0)"] +docs = ["sphinx (>=1.4.0)"] +test = ["coverage[toml] (==5.2.1)", "flake8 (==3.8.3)", "flake8-blind-except (==0.1.1)", "flake8-debugger (==3.2.1)", "flake8-imports (==0.1.1)", "freezegun (==0.3.15)", "isort (==5.3.2)", "pretend (==1.0.9)", "pytest (==6.2.5)", "pytest-asyncio", "pytest-cov (==2.8.1)", "pytest-httpx", "requests-mock (>=0.7.0)"] +xmlsec = ["xmlsec (>=0.6.1)"] + +[[package]] +name = "zope-event" +version = "5.0" +description = "Very basic event publishing system" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, + {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +docs = ["Sphinx"] +test = ["zope.testrunner"] + +[[package]] +name = "zope-interface" +version = "6.1" +description = "Interfaces for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zope.interface-6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:43b576c34ef0c1f5a4981163b551a8781896f2a37f71b8655fd20b5af0386abb"}, + {file = "zope.interface-6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67be3ca75012c6e9b109860820a8b6c9a84bfb036fbd1076246b98e56951ca92"}, + {file = "zope.interface-6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b9bc671626281f6045ad61d93a60f52fd5e8209b1610972cf0ef1bbe6d808e3"}, + {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe81def9cf3e46f16ce01d9bfd8bea595e06505e51b7baf45115c77352675fd"}, + {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dc998f6de015723196a904045e5a2217f3590b62ea31990672e31fbc5370b41"}, + {file = "zope.interface-6.1-cp310-cp310-win_amd64.whl", hash = "sha256:239a4a08525c080ff833560171d23b249f7f4d17fcbf9316ef4159f44997616f"}, + {file = "zope.interface-6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ffdaa5290422ac0f1688cb8adb1b94ca56cee3ad11f29f2ae301df8aecba7d1"}, + {file = "zope.interface-6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34c15ca9248f2e095ef2e93af2d633358c5f048c49fbfddf5fdfc47d5e263736"}, + {file = "zope.interface-6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b012d023b4fb59183909b45d7f97fb493ef7a46d2838a5e716e3155081894605"}, + {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97806e9ca3651588c1baaebb8d0c5ee3db95430b612db354c199b57378312ee8"}, + {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddbab55a2473f1d3b8833ec6b7ac31e8211b0aa608df5ab09ce07f3727326de"}, + {file = "zope.interface-6.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0da79117952a9a41253696ed3e8b560a425197d4e41634a23b1507efe3273f1"}, + {file = "zope.interface-6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8bb9c990ca9027b4214fa543fd4025818dc95f8b7abce79d61dc8a2112b561a"}, + {file = "zope.interface-6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51b64432eed4c0744241e9ce5c70dcfecac866dff720e746d0a9c82f371dfa7"}, + {file = "zope.interface-6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa6fd016e9644406d0a61313e50348c706e911dca29736a3266fc9e28ec4ca6d"}, + {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c8cf55261e15590065039696607f6c9c1aeda700ceee40c70478552d323b3ff"}, + {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e30506bcb03de8983f78884807e4fd95d8db6e65b69257eea05d13d519b83ac0"}, + {file = "zope.interface-6.1-cp312-cp312-win_amd64.whl", hash = "sha256:e33e86fd65f369f10608b08729c8f1c92ec7e0e485964670b4d2633a4812d36b"}, + {file = "zope.interface-6.1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:2f8d89721834524a813f37fa174bac074ec3d179858e4ad1b7efd4401f8ac45d"}, + {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13b7d0f2a67eb83c385880489dbb80145e9d344427b4262c49fbf2581677c11c"}, + {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef43ee91c193f827e49599e824385ec7c7f3cd152d74cb1dfe02cb135f264d83"}, + {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e441e8b7d587af0414d25e8d05e27040d78581388eed4c54c30c0c91aad3a379"}, + {file = "zope.interface-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89b28772fc2562ed9ad871c865f5320ef761a7fcc188a935e21fe8b31a38ca9"}, + {file = "zope.interface-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:70d2cef1bf529bff41559be2de9d44d47b002f65e17f43c73ddefc92f32bf00f"}, + {file = "zope.interface-6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad54ed57bdfa3254d23ae04a4b1ce405954969c1b0550cc2d1d2990e8b439de1"}, + {file = "zope.interface-6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef467d86d3cfde8b39ea1b35090208b0447caaabd38405420830f7fd85fbdd56"}, + {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af47f10cfc54c2ba2d825220f180cc1e2d4914d783d6fc0cd93d43d7bc1c78b"}, + {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9559138690e1bd4ea6cd0954d22d1e9251e8025ce9ede5d0af0ceae4a401e43"}, + {file = "zope.interface-6.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:964a7af27379ff4357dad1256d9f215047e70e93009e532d36dcb8909036033d"}, + {file = "zope.interface-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:387545206c56b0315fbadb0431d5129c797f92dc59e276b3ce82db07ac1c6179"}, + {file = "zope.interface-6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57d0a8ce40ce440f96a2c77824ee94bf0d0925e6089df7366c2272ccefcb7941"}, + {file = "zope.interface-6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ebc4d34e7620c4f0da7bf162c81978fce0ea820e4fa1e8fc40ee763839805f3"}, + {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a804abc126b33824a44a7aa94f06cd211a18bbf31898ba04bd0924fbe9d282d"}, + {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f294a15f7723fc0d3b40701ca9b446133ec713eafc1cc6afa7b3d98666ee1ac"}, + {file = "zope.interface-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a41f87bb93b8048fe866fa9e3d0c51e27fe55149035dcf5f43da4b56732c0a40"}, + {file = "zope.interface-6.1.tar.gz", hash = "sha256:2fdc7ccbd6eb6b7df5353012fbed6c3c5d04ceaca0038f75e601060e95345309"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] +test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] +testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] + +[metadata] +lock-version = "2.0" +python-versions = "~3.10.9" +content-hash = "d95fee17c6e12a5dbec8ad0bdb8e256aadee291d2c2306c4b4f11e3db07fe006" diff --git a/pull_request_template.md b/pull_request_template.md index f1ffb136c6..483d2b2639 100644 --- a/pull_request_template.md +++ b/pull_request_template.md @@ -1,65 +1,26 @@ # Summary | Résumé -> 1-3 sentence description of the changed you're proposing, including a link to -> a GitHub Issue # or Trello card if applicable. +_TODO: 1-3 sentence description of the changed you're proposing._ ---- +## Related Issues | Cartes liées -> Description en 1 à 3 phrases de la modification proposée, avec un lien vers le -> problème (« issue ») GitHub ou la fiche Trello, le cas échéant. +* https://app.zenhub.com/workspaces/notify-planning-614b3ad91bc2030015ed22f5/issues/gh/cds-snc/notification-planning/1 +* https://app.zenhub.com/workspaces/notify-planning-core-6411dfb7c95fb80014e0cab0/issues/gh/cds-snc/notification-planning-core/1 # Test instructions | Instructions pour tester la modification -> Sequential steps (1., 2., 3., ...) that describe how to test this change. This -> will help a developer test things out without too much detective work. Also, -> include any environmental setup steps that aren't in the normal README steps -> and/or any time-based elements that this requires. - ---- - -> Étapes consécutives (1., 2., 3., …) qui décrivent la façon de tester la -> modification. Elles aideront les développeurs à faire des tests sans avoir à -> jouer au détective. Veuillez aussi inclure toutes les étapes de configuration -> de l’environnement qui ne font pas partie des étapes normales dans le fichier -> README et tout élément temporel requis. +_TODO: Fill in test instructions for the reviewer._ # Release Instructions | Instructions pour le déploiement None. -> Necessary steps to perform before and after the deployment of these changes. -> For example, emptying the cache on a feature that changes the cache data -> structure in Redis could be mentioned. 
- ---- - -> Étapes nécessaires à exécuter avant et après le déploiement des changements -> introduits par cette proposition. Par exemple, vider la cache suite à des -> changements modifiant une structure de données de la cache pourrait être -> mentionné. - # Reviewer checklist | Liste de vérification du réviseur -This is a suggested checklist of questions reviewers might ask during their -review | Voici une suggestion de liste de vérification comprenant des questions -que les réviseurs pourraient poser pendant leur examen : - +- [ ] This PR does not break existing functionality. +- [ ] This PR does not violate GCNotify's privacy policies. +- [ ] This PR does not raise new security concerns. Refer to our GC Notify Risk Register document on our Google drive. +- [ ] This PR does not significantly alter performance. +- [ ] Additional required documentation resulting of these changes is covered (such as the README, setup instructions, a related ADR or the technical documentation). -- [ ] Is the code maintainable? | Est-ce que le code peut être maintenu? -- [ ] Have you tested it? | L’avez-vous testé? -- [ ] Are there automated tests? | Y a-t-il des tests automatisés? -- [ ] Does this cause automated test coverage to drop? | Est-ce que ça entraîne - une baisse de la quantité de code couvert par les tests automatisés? -- [ ] Does this break existing functionality? | Est-ce que ça brise une - fonctionnalité existante? -- [ ] Does this change the privacy policy? | Est-ce que ça entraîne une - modification de la politique de confidentialité? -- [ ] Does this introduce any security concerns? | Est-ce que ça introduit des - préoccupations liées à la sécurité? -- [ ] Does this significantly alter performance? | Est-ce que ça modifie de - façon importante la performance? -- [ ] What is the risk level of using added dependencies? | Quel est le degré de - risque d’utiliser des dépendances ajoutées? -- [ ] Should any documentation be updated as a result of this? (i.e. README - setup, etc.) | Faudra-t-il mettre à jour la documentation à la suite de ce - changement (fichier README, etc.)? +> ⚠ If boxes cannot be checked off before merging the PR, they should be moved to the "Release Instructions" section with appropriate steps required to verify before release. For example, changes to celery code may require tests on staging to verify that performance has not been affected. \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 72c8a22225..e2dcebd99b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,111 @@ [tool.black] line-length = 130 -target-version = ['py37', 'py38', 'py39'] +target-version = ['py310'] include = '(app|migrations|tests)/.*\.pyi?$' + +[tool.poetry] +name = "notification-api" +version = "0.1.0" +description = "Public-facing REST API for Notification built on the GOV.UK Notify platform." 
+authors = ["Canadian Digital Service"] +license = "MIT license" +readme = "README.md" +packages = [] + +[tool.poetry.scripts] +notify-api = "" + +[build-system] +requires = ["poetry>=1.3.2"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry.dependencies] +python = "~3.10.9" +apig-wsgi = "2.18.0" +boto = "2.49.0" +cffi = "1.16.0" +celery = {extras = ["sqs"], version = "5.3.6"} +docopt = "0.6.2" +environs = "9.5.0" # pyup: <9.3.3 # marshmallow v3 throws errors" +fido2 = "0.9.3" +Flask-Bcrypt = "1.0.1" +Flask-Migrate = "2.7.0" +Flask-SQLAlchemy = { git = "https://github.com/pallets-eco/flask-sqlalchemy.git", rev = "500e732dd1b975a56ab06a46bd1a20a21e682262"} +#git+https://github.com/mitsuhiko/flask-sqlalchemy.git@500e732dd1b975a56ab06a46bd1a20a21e682262#egg=Flask-SQLAlchemy==2.3.2.dev20190108 +Flask = "2.3.3" +click-datetime = "0.2" +gevent = "23.9.1" + +gunicorn = "20.1.0" +iso8601 = "2.1.0" +jsonschema = "3.2.0" +marshmallow-sqlalchemy = "0.29.0" +marshmallow = "3.21.0" +python-magic = "0.4.27" +psycopg2-binary = "2.9.9" +PyJWT = "2.8.0" +pytz = "2021.3" +PyYAML = "6.0.1" + +cachelib = "0.12.0" +SQLAlchemy = "1.4.52" +newrelic = "8.10.0" +notifications-python-client = "6.4.1" +python-dotenv = "1.0.1" +pwnedpasswords = "2.0.0" +tldextract = "3.5.0" +nanoid = "2.0.0" +unidecode = "1.3.8" +more-itertools = "8.14.0" +# PaaS +awscli-cwlogs = "1.4.6" +aws-embedded-metrics = "1.0.8" +# Putting upgrade on hold due to new version introducing breaking changes +Werkzeug = "3.0.3" +MarkupSafe = "2.1.5" +# REVIEW: v2 is using sha512 instead of sha1 by default (in v1) +itsdangerous = "2.2.0" +notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.2.7" } + +# rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8 +typing-extensions = "4.10.0" +greenlet = "2.0.2" +simple-salesforce = "^1.12.3" + +# Pinned dependencies +certifi = "^2023.7.22" # pinned for security reasons: https://github.com/cds-snc/notification-api/security/dependabot/119 +idna = "2.10" # pinned to align with test moto dependency requirements (for <=2.9) +flask-marshmallow = "0.14.0" +aws-xray-sdk = "^2.14.0" + +[tool.poetry.group.test.dependencies] +flake8 = "6.1.0" +isort = "5.13.2" +moto = "4.2.14" +idna = "2.10" +pytest = "7.4.4" +pytest-env = "0.8.2" +pytest-mock = "3.12.0" +pytest-cov = "3.0.0" +coveralls = "3.3.1" +pytest-xdist = "2.5.0" +freezegun = "1.4.0" +requests-mock = "1.11.0" +# optional requirements for jsonschema +strict-rfc3339 = "0.7" +rfc3987 = "1.3.8" +# used for creating manifest file locally +jinja2-cli = { extras = ["yaml"], version = "0.8.2" } +black = "23.12.1" +locust = "2.23.1" +mypy = "1.5" +sqlalchemy-stubs = "0.4" +sqlalchemy2-stubs = "0.0.2a38" +networkx = "2.8.8" # not directly required, pinned by Snyk to avoid a vulnerability +pytest-mock-resources = { extras = ["redis"], version = "2.10.0" } +types-boto = "2.49.18.9" +types-mock = "4.0.15.2" +types-python-dateutil = "2.8.19.20240106" +types-pytz = "2022.7.1.2" +types-requests = "2.31.0.20240406" +types-redis = "4.6.0.20240425" diff --git a/pytest.ini b/pytest.ini index 9f5b793a96..0ab0190e73 100644 --- a/pytest.ini +++ b/pytest.ini @@ -3,27 +3,27 @@ testpaths = tests env = NOTIFY_ENVIRONMENT=test NOTIFICATION_QUEUE_PREFIX=testing - MLWR_HOST=https://mlwr.ca - MLWR_USER=mlwr_user - MLWR_KEY=a_long_key AWS_ACCESS_KEY_ID='testing' AWS_SECRET_ACCESS_KEY='testing' AWS_SECURITY_TOKEN='testing' AWS_SESSION_TOKEN='testing' AWS_US_TOLL_FREE_NUMBER='+18005555555' FRESH_DESK_PRODUCT_ID=42 - ZENDESK_API_URL = 
https://zendesk-test.com - ZENDESK_API_KEY = zendesk-api-key FRESH_DESK_API_URL=https://freshdesk-test.com FRESH_DESK_API_KEY=freshdesk-api-key - ZENDESK_SELL_API_URL=https://zendesksell-test.com - ZENDESK_SELL_API_KEY=zendesksell-api-key AWS_REGION=ca-central-1 ASSET_DOMAIN=assets.notification.canada.ca NOTIFY_EMAIL_DOMAIN=notification.canada.ca + AWS_EMF_ENVIRONMENT=local + D:FF_CELERY_CUSTOM_TASK_PARAMS=True + D:FF_SALESFORCE_CONTACT=True + D:FF_CLOUDWATCH_METRICS_ENABLED=True + D:REDIS_URL=redis://localhost:6380 + D:SALESFORCE_USERNAME=testuser + D:SALESFORCE_PASSWORD=testpassword D:DOCUMENTATION_DOMAIN=documentation.notification.canada.ca D:SQLALCHEMY_DATABASE_URI=postgresql://postgres:postgres@localhost:5432/test_notification_api D:SQLALCHEMY_DATABASE_READER_URI=postgresql://reader:postgres@localhost:5432/test_notification_api -addopts = -v -p no:warnings +addopts = -v -p no:warnings -n1 diff --git a/renovate.json b/renovate.json new file mode 100644 index 0000000000..f4c7e18b3d --- /dev/null +++ b/renovate.json @@ -0,0 +1,6 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "local>cds-snc/renovate-config" + ] +} \ No newline at end of file diff --git a/requirements-app.txt b/requirements-app.txt deleted file mode 100644 index 2b038d0d43..0000000000 --- a/requirements-app.txt +++ /dev/null @@ -1,57 +0,0 @@ -# Run `make freeze-requirements` to update requirements.txt -# with package version changes made in requirements-app.txt - -aws-wsgi==0.2.7 -boto==2.49.0 -cffi==1.14.5 -celery[sqs]==5.0.5 -docopt==0.6.2 -fido2==0.9.1 -Flask-Bcrypt==0.7.1 -flask-marshmallow==0.14.0 -Flask-Migrate==2.7.0 -git+https://github.com/mitsuhiko/flask-sqlalchemy.git@500e732dd1b975a56ab06a46bd1a20a21e682262#egg=Flask-SQLAlchemy==2.3.2.dev20190108 -Flask==1.1.2 -click-datetime==0.2 -eventlet==0.30.2 # currently 0.31.0+ breaks gunicorn. Test the docker image if upgrading! -gunicorn==20.1.0 -iso8601==0.1.14 -idna==2.8 # pinned to align with test moto dependency requirements -jsonschema==3.2.0 -marshmallow-sqlalchemy==0.23.1 # pyup: <0.24.0 # marshmallow v3 throws errors -marshmallow==2.21.0 # pyup: <3 # v3 throws errors -python-magic==0.4.22 -psycopg2-binary==2.8.6 -PyJWT==2.1.0 -PyYAML==5.4.1 -SQLAlchemy==1.3.23 -sentry-sdk[flask]==1.0.0 -cachelib==0.1.1 - -newrelic==6.2.0.156 -notifications-python-client==6.0.2 -python-dotenv==0.17.1 -pwnedpasswords==2.0.0 -tldextract==3.1.0 -nanoid==2.0.0 -unidecode==1.2.0 - - -# PaaS -awscli-cwlogs>=1.4.6,<1.5 - - -# Putting upgrade on hold due to v1.0.0 using sha512 instead of sha1 by default -itsdangerous==0.24 # pyup: <1.0.0 - -git+https://github.com/cds-snc/notifier-utils.git@46.3.0#egg=notifications-utils - -# MLWR -socketio-client==0.5.6 -requests -requests[security] -pycryptodome - -git+https://bitbucket.org/cse-assemblyline/assemblyline_client.git@v3.7.3#egg=assemblyline_client==v3.7.3 - -rsa>=4.1 # not directly required, pinned by Snyk to avoid a vulnerability diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index f304fa7d62..0000000000 --- a/requirements.txt +++ /dev/null @@ -1,119 +0,0 @@ -# pyup: ignore file -# This file is autogenerated. Do not edit it manually. 
-# Run `make freeze-requirements` to update requirements.txt -# with package version changes made in requirements-app.txt - -aws-wsgi==0.2.7 -boto==2.49.0 -cffi==1.14.5 -celery[sqs]==5.0.5 -docopt==0.6.2 -fido2==0.9.1 -Flask-Bcrypt==0.7.1 -flask-marshmallow==0.14.0 -Flask-Migrate==2.7.0 -git+https://github.com/mitsuhiko/flask-sqlalchemy.git@500e732dd1b975a56ab06a46bd1a20a21e682262#egg=Flask-SQLAlchemy==2.3.2.dev20190108 -Flask==1.1.2 -click-datetime==0.2 -eventlet==0.30.2 # currently 0.31.0+ breaks gunicorn. Test the docker image if upgrading! -gunicorn==20.1.0 -iso8601==0.1.14 -idna==2.8 # pinned to align with test moto dependency requirements -jsonschema==3.2.0 -marshmallow-sqlalchemy==0.23.1 # pyup: <0.24.0 # marshmallow v3 throws errors -marshmallow==2.21.0 # pyup: <3 # v3 throws errors -python-magic==0.4.22 -psycopg2-binary==2.8.6 -PyJWT==2.1.0 -PyYAML==5.4.1 -SQLAlchemy==1.3.23 -sentry-sdk[flask]==1.0.0 -cachelib==0.1.1 - -newrelic==6.2.0.156 -notifications-python-client==6.0.2 -python-dotenv==0.17.1 -pwnedpasswords==2.0.0 -tldextract==3.1.0 -nanoid==2.0.0 -unidecode==1.2.0 - - -# PaaS -awscli-cwlogs>=1.4.6,<1.5 - - -# Putting upgrade on hold due to v1.0.0 using sha512 instead of sha1 by default -itsdangerous==0.24 # pyup: <1.0.0 - -git+https://github.com/cds-snc/notifier-utils.git@46.3.0#egg=notifications-utils - -# MLWR -socketio-client==0.5.6 -requests -requests[security] -pycryptodome - -git+https://bitbucket.org/cse-assemblyline/assemblyline_client.git@v3.7.3#egg=assemblyline_client==v3.7.3 - -rsa>=4.1 # not directly required, pinned by Snyk to avoid a vulnerability - -## The following requirements were added by pip freeze: -alembic==1.7.3 -amqp==5.0.6 -attrs==21.2.0 -awscli==1.19.58 -bcrypt==3.2.0 -billiard==3.6.4.0 -bleach==3.3.0 -blinker==1.4 -boto3==1.17.58 -botocore==1.20.58 -cachetools==4.2.1 -certifi==2021.5.30 -chardet==4.0.0 -click==7.1.2 -click-didyoumean==0.0.3 -click-plugins==1.1.1 -click-repl==0.2.0 -colorama==0.4.3 -cryptography==3.4.8 -dnspython==1.16.0 -docutils==0.15.2 -filelock==3.0.12 -flask-redis==0.4.0 -future==0.18.2 -greenlet==1.1.1 -Jinja2==2.11.3 -jmespath==0.10.0 -kombu==5.1.0 -Mako==1.1.5 -MarkupSafe==2.0.1 -mistune==0.8.4 -orderedset==2.0.3 -packaging==21.0 -phonenumbers==8.12.21 -prompt-toolkit==3.0.20 -py-w3c==0.3.1 -pyasn1==0.4.8 -pycparser==2.20 -pycurl==7.43.0.5 -pyOpenSSL==20.0.1 -pyparsing==2.4.7 -PyPDF2==1.26.0 -pyrsistent==0.18.0 -python-dateutil==2.8.2 -python-json-logger==2.0.1 -pytz==2021.1 -redis==3.5.3 -requests-file==1.5.1 -s3transfer==0.4.2 -six==1.16.0 -smartypants==2.0.1 -statsd==3.3.0 -urllib3==1.26.7 -vine==5.0.0 -wcwidth==0.2.5 -webencodings==0.5.1 -websocket-client==1.2.1 -Werkzeug==1.0.1 diff --git a/requirements_for_test.txt b/requirements_for_test.txt deleted file mode 100644 index 9e526ef5fd..0000000000 --- a/requirements_for_test.txt +++ /dev/null @@ -1,23 +0,0 @@ --r requirements.txt -flake8==3.8.4 -isort==5.6.4 -moto==1.3.14 -idna==2.8 -pytest==3.10.1 # pyup: <4 -pytest-env==0.6.2 -pytest-mock==1.10.4 -pytest-cov==2.6.1 -coveralls==1.11.1 -pytest-xdist==1.27.0 # pyup: ignore, version 1.28.0 requires pytest >= 4.4 -freezegun==1.0.0 -requests-mock==1.8.0 -# optional requirements for jsonschema -strict-rfc3339==0.7 -rfc3987==1.3.8 -# used for creating manifest file locally -jinja2-cli[yaml]==0.6.0 -black==21.5b2 -locust==1.5.3 -mypy==0.812 -sqlalchemy-stubs==0.4 -networkx>=2.6 # not directly required, pinned by Snyk to avoid a vulnerability \ No newline at end of file diff --git a/runtime.txt b/runtime.txt deleted file mode 
100644 index d66fd40d87..0000000000 --- a/runtime.txt +++ /dev/null @@ -1 +0,0 @@ -python-3.9.1 \ No newline at end of file diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 46aafd72ee..c23371d0e3 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -4,7 +4,7 @@ make generate-version-file # Install Python development dependencies -pip3 install -r requirements_for_test.txt +poetry install --only test # Upgrade databases flask db upgrade diff --git a/scripts/cwcheck.sh b/scripts/cwcheck.sh new file mode 100755 index 0000000000..6adf6f23bb --- /dev/null +++ b/scripts/cwcheck.sh @@ -0,0 +1,21 @@ +#!/bin/bash +# Check and see if this is running in K8s and if so, wait for cloudwatch agent +if [ -n "${STATSD_HOST}" ]; then + echo "Initializing... Waiting for CWAgent to become ready within the next 30 seconds." + timeout=30 + while [ $timeout -gt 0 ]; do + if nc -vz "$STATSD_HOST" 25888; then + echo "CWAgent is Ready." + break + else + echo "Waiting for CWAgent to become ready." + sleep 1 + timeout=$((timeout - 1)) + fi + done + + if [ $timeout -eq 0 ]; then + echo "Timeout reached. CWAgent did not become ready in 30 seconds." + exit 1 + fi +fi diff --git a/scripts/enlarge_db/README.md b/scripts/enlarge_db/README.md new file mode 100644 index 0000000000..b36090390b --- /dev/null +++ b/scripts/enlarge_db/README.md @@ -0,0 +1,22 @@ +# Enlarge DB + +## Purpose + +The purpose of this script is to add rows to the notification_history table. This is useful in estimating how long database-related infrastructure operations will take when performed on a database the same size as that in production. + +## How to use + +The script should be run in the same environment as api. Locally this can be in the api repo devcontainer, while in AWS the api kubernetes pod would be preferred. + +To add 2000 rows to the table with a client_reference of "test2000" run + +``` +cd scripts/enlarge_db +python enlarge_db.py -n 2000 -r test2000 +``` + +The new notifications are added in batches to improve performance, with a default batch size of 10000.
You may use a different batch with the `-c` parameter, for example + +``` +python enlarge_db.py -n 2000 -c 101 -r test2000x101 +``` diff --git a/scripts/enlarge_db/enlarge_db.py b/scripts/enlarge_db/enlarge_db.py new file mode 100644 index 0000000000..649e6f3032 --- /dev/null +++ b/scripts/enlarge_db/enlarge_db.py @@ -0,0 +1,55 @@ + +import argparse +import sys +from datetime import datetime +from typing import List + +from flask import Flask + +sys.path.append("../..") +from app import create_app, create_uuid, db # noqa: E402 +from app.config import Config # noqa: E402 +from app.models import NotificationHistory # noqa: E402 + +DEFAULT_CHUNK_SIZE = 10000 + + +def create_notifications(n: int, ref: str) -> List[NotificationHistory]: + notifications = [ + NotificationHistory( + id=create_uuid(), + created_at=datetime.utcnow(), + template_id=Config.NEW_USER_EMAIL_VERIFICATION_TEMPLATE_ID, + template_version=1, + service_id=Config.NOTIFY_SERVICE_ID, + notification_type="email", + key_type='normal', + client_reference=ref, + ) + for _ in range(n) + ] + return notifications + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("-n", "--notifications", default=1, type=int, help="number of notifications to add to the notification_history table (default 1)") + parser.add_argument("-r", "--reference", default="manually created", type=str, help="client reference to use for the notifications (default 'manually created')") + parser.add_argument("-c", "--chunksize", default=DEFAULT_CHUNK_SIZE, type=int, help=f"chunk size for bulk_save_objects (default {DEFAULT_CHUNK_SIZE})") + args = parser.parse_args() + + app = Flask("enlarge_db") + create_app(app) + + for notifications_done in range(0, args.notifications, args.chunksize): + notifications = create_notifications(min(args.chunksize, args.notifications - notifications_done), args.reference) + print(f"Adding {len(notifications)} notifications to notification_history") + with app.app_context(): + try: + db.session.bulk_save_objects(notifications) + db.session.commit() + except Exception as e: + print(f"Error adding notifications: {e}") + db.session.rollback() + sys.exit(1) + print(f"Done {notifications_done+len(notifications)} / {args.notifications}") diff --git a/scripts/internal-stress-test/.env.example b/scripts/internal-stress-test/.env.example new file mode 100644 index 0000000000..6e797ff095 --- /dev/null +++ b/scripts/internal-stress-test/.env.example @@ -0,0 +1,3 @@ +API_KEY= +EMAIL_TEMPLATE_ID= +SMS_TEMPLATE_ID= diff --git a/scripts/internal-stress-test/README.md b/scripts/internal-stress-test/README.md new file mode 100644 index 0000000000..dec3a024f5 --- /dev/null +++ b/scripts/internal-stress-test/README.md @@ -0,0 +1,48 @@ +# Internal stress test + +## Goals + +The goal of this code is to test Notify internals by putting as many emails or sms as possible through the code up to the point of handing to AWS for delivery. + +## How to configure + +Run the setup.sh to install the python pre-requisites or run in the repo devcontainer. + +Default configuration is in the `locust.conf` file. + +The python file `internal_stress_test.py` requires environment variables `API_KEY`, `EMAIL_TEMPLATE_ID`, and `SMS_TEMPLATE_ID`. The template should have no variables. 
+ +``` +API_KEY=gcntfy-notAKey-f6c7cc49-b5b7-4e67-a8ff-24f34be34523-f6c7cc49-b5b7-4e67-a8ff-24f34be34523 +EMAIL_TEMPLATE_ID=f6c7cc49-b5b7-4e67-a8ff-24f34be34523 +SMS_TEMPLATE_ID=f6c7aa49-b5b7-4e67-a8ff-24f34be34523 +``` +These can be in a `.env` file in the internal_stress_test directory. + +__See Last Pass note "Soak Test Staging API Key and Template" in Shared-New-Notify-Staging folder__ + +Note that the default configuration in `locust.conf` is to send one email per second. + +## How to run + +There are two ways to run Locust, with the UI or headless. + +### With the UI + +Locally, simply run: + +```shell +locust -f ./internal_stress_test.py --type [ email | sms ] +``` + +Follow the localhost address that the console will display to get to the UI. It will ask you how many total users and spawned users you want configured. Once setup, you can manually start the tests via the UI and follow the summary data and charts visually. + +### Headless, via the command line + +You can pass the necessary parameters to the command line to run in the headless mode. For example: + +```shell +locust -f ./internal_stress_test.py --headless --type [ email | sms ] +``` + +The defaults in `locust.conf` may be overridden by command line options. \ No newline at end of file diff --git a/scripts/internal-stress-test/internal_stress_test.py b/scripts/internal-stress-test/internal_stress_test.py new file mode 100644 index 0000000000..beea7eb6a0 --- /dev/null +++ b/scripts/internal-stress-test/internal_stress_test.py @@ -0,0 +1,42 @@ +import os +import sys + +from dotenv import load_dotenv +from locust import HttpUser, constant_pacing, events, task + +load_dotenv() + +# Match with app/config.py +INTERNAL_TEST_NUMBER = "+16135550123" +INTERNAL_TEST_EMAIL_ADDRESS = "internal.test@cds-snc.ca" + + +@events.init_command_line_parser.add_listener +def _(parser): + parser.add_argument("--type", type=str, default="none", help="email or sms") + + +class NotifyApiUser(HttpUser): + wait_time = constant_pacing(1) # each user makes one post per second + + def __init__(self, *args, **kwargs): + super(NotifyApiUser, self).__init__(*args, **kwargs) + self.headers = {"Authorization": f"apikey-v1 {os.getenv('API_KEY')}"} + self.email_template = os.getenv("EMAIL_TEMPLATE_ID") + self.sms_template = os.getenv("SMS_TEMPLATE_ID") + self.email_address = INTERNAL_TEST_EMAIL_ADDRESS + self.phone_number = INTERNAL_TEST_NUMBER + self.type = self.environment.parsed_options.type + + if self.type not in ["email", "sms"]: + print("Invalid type. 
Must have --type email or --type sms") + sys.exit() + + @task(1) + def send_notification(self): + if self.type == "email": + json = {"email_address": self.email_address, "template_id": self.email_template} + self.client.post("/v2/notifications/email", json=json, headers=self.headers) + else: + json = {"phone_number": self.phone_number, "template_id": self.sms_template} + self.client.post("/v2/notifications/sms", json=json, headers=self.headers) diff --git a/scripts/internal-stress-test/locust.conf b/scripts/internal-stress-test/locust.conf new file mode 100644 index 0000000000..d88ebcd001 --- /dev/null +++ b/scripts/internal-stress-test/locust.conf @@ -0,0 +1,3 @@ +users=1 +stop-timeout=10 +host=https://api.staging.notification.cdssandbox.xyz diff --git a/scripts/internal-stress-test/setup.sh b/scripts/internal-stress-test/setup.sh new file mode 100755 index 0000000000..d857a30830 --- /dev/null +++ b/scripts/internal-stress-test/setup.sh @@ -0,0 +1,2 @@ +#!/bin/bash +pip install locust python-dotenv \ No newline at end of file diff --git a/scripts/load_test/.env.example b/scripts/load_test/.env.example new file mode 100644 index 0000000000..50335a1d9e --- /dev/null +++ b/scripts/load_test/.env.example @@ -0,0 +1,7 @@ +API_KEY= +HIGH_PRIORITY_EMAIL_TEMPLATE_ID= +MEDIUM_PRIORITY_EMAIL_TEMPLATE_ID= +LOW_PRIORITY_EMAIL_TEMPLATE_ID= +HIGH_PRIORITY_SMS_TEMPLATE_ID= +MEDIUM_PRIORITY_SMS_TEMPLATE_ID= +LOW_PRIORITY_SMS_TEMPLATE_ID= diff --git a/scripts/load_test/README.md b/scripts/load_test/README.md new file mode 100644 index 0000000000..356f341395 --- /dev/null +++ b/scripts/load_test/README.md @@ -0,0 +1,41 @@ +# Load test + +## Goals + +The goal of this code is to do a realistic load test of api while we make significant application or infrastructure changes. + +## How to configure + +Run the setup.sh to install the python pre-requisites or run in the repo devcontainer. + +Default configuration is in the `locust.conf` file. + +The python file `load_test.py` requires environment variables as listed in `.env.example`. The templates should have no variables. + +__See One Password note "Load Test Variables" in Shared-New-Notify-Staging folder__ + + +## How to run + +There are two ways to run Locust, with the UI or headless. + +### With the UI + +Locally you can run the email soak test with: + +```shell +locust -f ./load_test.py +``` + +Follow the localhost address that the console will display to get to the UI. It will ask you how many total users and spawned users you want configured. Once setup, you can manually start the tests via the UI and follow the summary data and charts visually. + +### Headless, via the command line + +You can pass the necessary parameters to the command line to run in the headless mode. 
For example: + +```shell +locust -f ./load_test.py --headless +``` + +The defaults in `locust.conf` may be overridden by command line options + diff --git a/scripts/load_test/load_test.py b/scripts/load_test/load_test.py new file mode 100644 index 0000000000..6f1219f5d9 --- /dev/null +++ b/scripts/load_test/load_test.py @@ -0,0 +1,90 @@ +import csv +import os +from datetime import datetime +from io import StringIO +from typing import Iterator, List + +from dotenv import load_dotenv +from locust import HttpUser, constant_pacing, task + +load_dotenv() + + +def rows_to_csv(rows: List[List[str]]): + output = StringIO() + writer = csv.writer(output) + writer.writerows(rows) + return output.getvalue() + + +def job_lines(data: str, number_of_lines: int) -> Iterator[List[str]]: + return map(lambda n: [data], range(0, number_of_lines)) + + +class NotifyApiUser(HttpUser): + wait_time = constant_pacing(1) # do something every second + + def __init__(self, *args, **kwargs): + super(NotifyApiUser, self).__init__(*args, **kwargs) + + self.headers = {"Authorization": f"apikey-v1 {os.getenv('API_KEY')}"} + self.email_address = "success@simulator.amazonses.com" + self.phone_number = "16135550123" # INTERNAL_TEST_NUMBER, does not actually send SMS + self.high_priority_email_template = os.getenv("HIGH_PRIORITY_EMAIL_TEMPLATE_ID") + self.medium_priority_email_template = os.getenv("MEDIUM_PRIORITY_EMAIL_TEMPLATE_ID") + self.low_priority_email_template = os.getenv("LOW_PRIORITY_EMAIL_TEMPLATE_ID") + self.high_priority_sms_template = os.getenv("HIGH_PRIORITY_SMS_TEMPLATE_ID") + self.medium_priority_sms_template = os.getenv("MEDIUM_PRIORITY_SMS_TEMPLATE_ID") + self.low_priority_sms_template = os.getenv("LOW_PRIORITY_SMS_TEMPLATE_ID") + + def send_bulk_email(self, template: str, count: int): + json = { + "name": f"bulk emails {datetime.utcnow().isoformat()}", + "template_id": template, + "csv": rows_to_csv([["email address"], *job_lines(self.email_address, count)]) + } + self.client.post("/v2/notifications/bulk", json=json, headers=self.headers, timeout=60) + + def send_bulk_sms(self, template: str, count: int): + json = { + "name": f"bulk sms {datetime.utcnow().isoformat()}", + "template_id": template, + "csv": rows_to_csv([["phone_number"], *job_lines(self.phone_number, count)]) + } + self.client.post("/v2/notifications/bulk", json=json, headers=self.headers, timeout=60) + + # SMS Tasks + + @task(120) # about every 5 seconds + def send_high_priority_sms(self): + json = {"phone_number": self.phone_number, "template_id": self.high_priority_sms_template} + self.client.post("/v2/notifications/sms", json=json, headers=self.headers) + + @task(2) # about every 5 minutes + def send_medium_priority_sms(self): + self.send_bulk_sms(self.medium_priority_sms_template, 199) + + @task(1) # about every 10 minutes + def send_low_priority_sms(self): + self.send_bulk_sms(self.low_priority_sms_template, 1000) + + # Email Tasks + + @task(120) # about every 5 seconds + def send_high_priority_email(self): + json = {"email_address": self.email_address, "template_id": self.high_priority_email_template} + self.client.post("/v2/notifications/email", json=json, headers=self.headers) + + @task(2) # about every 5 minutes + def send_medium_priority_email(self): + self.send_bulk_email(self.medium_priority_email_template, 199) + + @task(1) # about every 10 minutes + def send_low_priority_emails(self): + self.send_bulk_email(self.low_priority_email_template, 10000) + + # Do nothing task + + @task(600 - 120 - 2 - 1 - 120 - 2 - 1) + def 
do_nothing(self): + pass diff --git a/scripts/load_test/locust.conf b/scripts/load_test/locust.conf new file mode 100644 index 0000000000..d88ebcd001 --- /dev/null +++ b/scripts/load_test/locust.conf @@ -0,0 +1,3 @@ +users=1 +stop-timeout=10 +host=https://api.staging.notification.cdssandbox.xyz diff --git a/scripts/load_test/setup.sh b/scripts/load_test/setup.sh new file mode 100755 index 0000000000..191b418eda --- /dev/null +++ b/scripts/load_test/setup.sh @@ -0,0 +1,2 @@ +#!/bin/bash +pip install locust python-dotenv diff --git a/scripts/resign_database.py b/scripts/resign_database.py new file mode 100755 index 0000000000..55a8475c5b --- /dev/null +++ b/scripts/resign_database.py @@ -0,0 +1,55 @@ +""" +Script to resign certain database fields: + - api key secrets + - inbound sms content + - service callback bearer_tokens + +Needs Notify config variables and access to the database. In AWS run on an api pod. + +Usage (run from the scripts/ folder): + python resign_database.py [unsafe] + - unsafe: unsign regardless of whether the current secret key can verify the signature +""" + +import argparse +import sys + +from dotenv import load_dotenv +from flask import Flask + +sys.path.append('..') # needed so we can find app (as run from scripts/ folder) + +from flask import current_app # noqa: E402 + +from app import create_app # noqa: E402 +from app.dao.api_key_dao import resign_api_keys # noqa: E402 +from app.dao.inbound_sms_dao import resign_inbound_sms # noqa: E402 +from app.dao.notifications_dao import resign_notifications # noqa: E402 +from app.dao.service_callback_api_dao import resign_service_callbacks # noqa: E402 + + +def resign_all(chunk: int, resign: bool, unsafe: bool, notifications: bool): + resign_api_keys(resign, unsafe) + resign_inbound_sms(resign, unsafe) + resign_service_callbacks(resign, unsafe) + if notifications: + resign_notifications(chunk, resign, unsafe) + if not resign: + current_app.logger.info("NOTE: this is a preview, fields have not been changed. 
To resign fields, run with --resign flag") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("-n", "--notifications", default=False, action='store_true', help="resign notifications (default false)") + parser.add_argument("-c", "--chunk", default=25000, type=int, help="size of chunks of notifications to resign at a time (default 25000)") + parser.add_argument("-r", "--resign", default=False, action='store_true', help="resign columns (default false)") + parser.add_argument("-u", "--unsafe", default=False, action='store_true', help="ignore bad signatures (default false)") + + args = parser.parse_args() + + load_dotenv() + application = Flask("resign_database") + create_app(application) + application.app_context().push() + + resign_all(args.chunk, args.resign, args.unsafe, args.notifications) diff --git a/scripts/run_app.sh b/scripts/run_app.sh index 2c5066831a..71a6797318 100755 --- a/scripts/run_app.sh +++ b/scripts/run_app.sh @@ -2,4 +2,4 @@ set -e -flask run -p 6011 --host=0.0.0.0 +make run diff --git a/scripts/run_celery.ps1 b/scripts/run_celery.ps1 index a8265f1be2..b35cb71e94 100644 --- a/scripts/run_celery.ps1 +++ b/scripts/run_celery.ps1 @@ -1,3 +1,3 @@ $ENV:FORKED_BY_MULTIPROCESSING=1 -celery --app run_celery worker --pidfile="$env:TEMP\celery.pid" --pool=solo --loglevel=DEBUG --concurrency=4 -Q database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-tasks,send-email-tasks,service-callbacks,delivery-receipts +celery --app run_celery worker --pidfile="$env:TEMP\celery.pid" --pool=solo --loglevel=DEBUG --concurrency=1 -Q "database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-high,send-sms-medium,send-sms-low,service-callbacks,service-callbacks-retry,delivery-receipts" diff --git a/scripts/run_celery.sh b/scripts/run_celery.sh index 53287a559c..6d83d67054 100755 --- a/scripts/run_celery.sh +++ b/scripts/run_celery.sh @@ -2,4 +2,8 @@ set -e -celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency=4 -Q database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-tasks,send-email-tasks,service-callbacks,delivery-receipts +# Runs celery with all celery queues except the throttled sms queue.
+ +echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" + +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-high,send-sms-medium,send-sms-low,service-callbacks,service-callbacks-retry,delivery-receipts diff --git a/scripts/run_celery_beat.ps1 b/scripts/run_celery_beat.ps1 new file mode 100644 index 0000000000..d823386a09 --- /dev/null +++ b/scripts/run_celery_beat.ps1 @@ -0,0 +1,3 @@ +$ENV:FORKED_BY_MULTIPROCESSING=1 + +celery -A run_celery beat --pidfile="$env:TEMP\celery-beat.pid" --loglevel=INFO \ No newline at end of file diff --git a/scripts/run_celery_beat.sh b/scripts/run_celery_beat.sh index 81badfb9b0..2479d725ad 100755 --- a/scripts/run_celery_beat.sh +++ b/scripts/run_celery_beat.sh @@ -2,4 +2,6 @@ set -e +# Runs the celery beat process, i.e the Celery periodic tasks. + celery -A run_celery.notify_celery beat --loglevel=INFO diff --git a/scripts/run_celery_beat_local.sh b/scripts/run_celery_beat_local.sh new file mode 100755 index 0000000000..f918ea7176 --- /dev/null +++ b/scripts/run_celery_beat_local.sh @@ -0,0 +1,7 @@ +#!/bin/sh + +# runs the celery beat process. This runs the periodic tasks + +set -e + +celery -A run_celery.notify_celery beat --loglevel=INFO diff --git a/scripts/run_celery_core_tasks.sh b/scripts/run_celery_core_tasks.sh new file mode 100755 index 0000000000..060af2ad37 --- /dev/null +++ b/scripts/run_celery_core_tasks.sh @@ -0,0 +1,10 @@ +#!/bin/sh + +set -e + +# Runs celery with all celery queues except send-throttled-sms-tasks, +# send-sms-* and send-email-*. 
+ +echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" + +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,service-callbacks,service-callbacks-retry,delivery-receipts diff --git a/scripts/run_celery_exit.sh b/scripts/run_celery_exit.sh new file mode 100755 index 0000000000..97ed326f56 --- /dev/null +++ b/scripts/run_celery_exit.sh @@ -0,0 +1,74 @@ +#!/bin/bash + +set -e + +TERMINATE_TIMEOUT=9 + +function get_celery_pids { + # get the PIDs of the process whose parent is the root process + # print only pid and their command, get the ones with "celery" in their name + # and keep only these PIDs + + set +o pipefail # so grep returning no matches does not premature fail pipe + # shellcheck disable=SC2009 # We don't want to bother re-writing this to use pgrep + APP_PIDS=$(ps aux --sort=start_time | grep 'celery worker' | grep 'bin/celery' | head -1 | awk '{print $2}') + set -o pipefail # pipefail should be set everywhere else +} + +function send_signal_to_celery_processes { + # refresh pids to account for the case that some workers may have terminated but others not + get_celery_pids + # send signal to all remaining apps + echo "${APP_PIDS}" | tr -d '\n' | tr -s ' ' | xargs echo "Sending signal ${1} to processes with pids: " >> /proc/1/fd/1 + echo "We will send ${1} signal" >> /proc/1/fd/1 + for value in ${APP_PIDS} + do + echo kill -s "${1}" "$value" + kill -s "${1}" "$value" + done + #echo ${APP_PIDS} | xargs kill -s ${1} +} + +function error_exit() +{ + echo "Error: $1" >> /proc/1/fd/1 +} + +function ensure_celery_is_running { + if [ "${APP_PIDS}" = "" ]; then + echo "There are no celery processes running, this container is bad" >> /proc/1/fd/1 + + echo "Exporting CF information for diagnosis" >> /proc/1/fd/1 + + env | grep CF + + exit 1 + fi +} + + +function on_exit { + apk add --no-cache procps + apk add --no-cache coreutils + echo "multi worker app exiting" >> /proc/1/fd/1 + wait_time=0 + + send_signal_to_celery_processes TERM + + # check if the apps are still running every second + while [[ "$wait_time" -le "$TERMINATE_TIMEOUT" ]]; do + echo "exit function is running with wait time of 9s" >> /proc/1/fd/1 + get_celery_pids + ensure_celery_is_running + # shellcheck disable=SC2219 # We could probably rewrite it as `((wait_time++)) || true` but I haven't tested and I assume this works as is + let wait_time=wait_time+1 + sleep 1 + done + + echo "sending signal to celery to kill process as TERM signal has not timed out" >> /proc/1/fd/1 + send_signal_to_celery_processes KILL +} + +echo "Run script pid: $$" >> /proc/1/fd/1 + +on_exit diff --git a/scripts/run_celery_local.sh b/scripts/run_celery_local.sh new file mode 100755 index 0000000000..9eb29f2658 --- /dev/null +++ b/scripts/run_celery_local.sh @@ -0,0 +1,10 @@ +#!/bin/sh + +# runs celery with all celery queues +# This is for local use only, NOT for use in AWS + +set -e + +echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" + +celery -A run_celery.notify_celery worker --beat --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q 
database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-sms-high,send-sms-medium,send-sms-low,send-throttled-sms-tasks,send-email-high,send-email-medium,send-email-low,service-callbacks,service-callbacks-retry,delivery-receipts diff --git a/scripts/run_celery_no_sms_sending.sh b/scripts/run_celery_no_sms_sending.sh new file mode 100755 index 0000000000..53546088b1 --- /dev/null +++ b/scripts/run_celery_no_sms_sending.sh @@ -0,0 +1,31 @@ +#!/bin/sh + +set -e + +# Runs celery with all celery queues except send-throttled-sms-tasks, +# send-sms-high, send-sms-medium, or send-sms-low. + +# Check and see if this is running in K8s and if so, wait for cloudwatch agent +if [ -n "${STATSD_HOST}" ]; then + echo "Initializing... Waiting for CWAgent to become ready within the next 30 seconds." + timeout=30 + while [ $timeout -gt 0 ]; do + if nc -vz "$STATSD_HOST" 25888; then + echo "CWAgent is Ready." + break + else + echo "Waiting for CWAgent to become ready." + sleep 1 + timeout=$((timeout - 1)) + fi + done + + if [ $timeout -eq 0 ]; then + echo "Timeout reached. CWAgent did not become ready in 30 seconds." + exit 1 + fi +fi + +echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" + +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q database-tasks,-priority-database-tasks.fifo,-normal-database-tasks,-bulk-database-tasks,job-tasks,notify-internal-tasks,periodic-tasks,priority-tasks,normal-tasks,bulk-tasks,reporting-tasks,research-mode-tasks,retry-tasks,send-email-high,send-email-medium,send-email-low,service-callbacks,service-callbacks-retry,delivery-receipts diff --git a/scripts/run_celery_purge.sh b/scripts/run_celery_purge.sh new file mode 100755 index 0000000000..ea67a3c807 --- /dev/null +++ b/scripts/run_celery_purge.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +set -e + +printf "\n--------------------------------------------------\n" +printf " WARNING!!!!\n" +printf " This script is for local development only!\n" +printf " It will delete everything in the celery queues.\n" +printf "\n--------------------------------------------------\n" +printf "Are you sure you want to continue?" +echo "If so, type 'purge'> \c" +read -r check + case $check in + purge ) echo "purging!"; celery -A run_celery.notify_celery purge -f;; + * ) printf "\nNot purging\n";; + esac + diff --git a/scripts/run_celery_send_email.sh b/scripts/run_celery_send_email.sh new file mode 100755 index 0000000000..29c6039f09 --- /dev/null +++ b/scripts/run_celery_send_email.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +set -e + +# Runs celery with only the send-email-* queues. + +echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" + +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-email-high,send-email-medium,send-email-low diff --git a/scripts/run_celery_send_sms.sh b/scripts/run_celery_send_sms.sh new file mode 100755 index 0000000000..7aee759338 --- /dev/null +++ b/scripts/run_celery_send_sms.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +set -e + +# Runs celery with only the send-sms-* queues. 
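+# The throttled queue (send-throttled-sms-tasks) is handled separately by run_celery_sms.sh.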
+ +echo "Start celery, concurrency: ${CELERY_CONCURRENCY-4}" + +celery -A run_celery.notify_celery worker --pidfile="/tmp/celery.pid" --loglevel=INFO --concurrency="${CELERY_CONCURRENCY-4}" -Q send-sms-high,send-sms-medium,send-sms-low diff --git a/scripts/run_celery_sms.sh b/scripts/run_celery_sms.sh index b36d309954..302ecc5f15 100755 --- a/scripts/run_celery_sms.sh +++ b/scripts/run_celery_sms.sh @@ -2,4 +2,6 @@ set -e +# Runs celery with only the throttled sms sending queue. + celery -A run_celery.notify_celery worker --loglevel=INFO --concurrency=1 -Q send-throttled-sms-tasks diff --git a/scripts/run_single_test.sh b/scripts/run_single_test.sh index 3d05f4bb0d..ce20b8318d 100755 --- a/scripts/run_single_test.sh +++ b/scripts/run_single_test.sh @@ -1,4 +1,5 @@ #!/bin/sh # run a single unit test, pass in the unit test name for example: tests/app/service/test_rest.py::test_get_template_list -source environment_test.sh -py.test $@ +# shellcheck source=/dev/null # Not finding this file in code base +. environment_test.sh +py.test "$@" diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index 54587f8b5c..53ada3e0d9 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -12,9 +12,9 @@ function display_result { EXIT_STATUS=$2 TEST=$3 - if [ $RESULT -ne 0 ]; then + if [ "$RESULT" -ne 0 ]; then echo -e "\033[31m$TEST failed\033[0m" - exit $EXIT_STATUS + exit "$EXIT_STATUS" else echo -e "\033[32m$TEST passed\033[0m" fi @@ -35,6 +35,14 @@ display_result $? 2 "Import order check" mypy . display_result $? 1 "Type check" -# run with four concurrent threads -py.test --disable-pytest-warnings --cov=app --cov-report=term-missing tests/ --junitxml=test_results.xml -n4 -v --maxfail=10 -display_result $? 2 "Unit tests" +# Run tests that need serial execution. +if ! docker info > /dev/null 2>&1; then + echo "This test uses docker, and it isn't running - please start docker and try again." + exit 1 +fi +py.test --disable-pytest-warnings --cov=app --cov-report=term-missing tests/ --junitxml=test_results_serial.xml -v --maxfail=10 -m "serial" +display_result $? 2 "Unit tests [serial]" + +# Run with four concurrent threads. +py.test --disable-pytest-warnings --cov=app --cov-report=term-missing tests/ --junitxml=test_results.xml -n4 -v --maxfail=10 -m "not serial" +display_result $? 2 "Unit tests [concurrent]" diff --git a/scripts/soak_test/.env.example b/scripts/soak_test/.env.example new file mode 100644 index 0000000000..6e797ff095 --- /dev/null +++ b/scripts/soak_test/.env.example @@ -0,0 +1,3 @@ +API_KEY= +EMAIL_TEMPLATE_ID= +SMS_TEMPLATE_ID= diff --git a/scripts/soak_test/README.md b/scripts/soak_test/README.md new file mode 100644 index 0000000000..04804dde05 --- /dev/null +++ b/scripts/soak_test/README.md @@ -0,0 +1,96 @@ +# Soak test + +## Goals + +The goal of this code is to do a soak test of api while we make significant application or infrastructure changes. + +There are two soak tests here: +- `soak_test_send_notification.py` will POST an email or SMS to api every second. +- `soak_test_all_servers.py` will do a GET to all our servers (admin, api, dd-api, api-k8s, documentation), on average hitting each server once a second + +## How to configure + +Run the setup.sh to install the python pre-requisites or run in the repo devcontainer. + +Default configuration is in the `locust.conf` file. Note that the `host` is the base address of the system you are testing, for example `https://staging.notification.cdssandbox.xyz` **not** `https://api.staging.notification.cdssandbox.xyz`. 
The "api" prefix will be added in the code. + +The python file `soak_test_send_notification.py` requires environment variables `API_KEY`, `EMAIL_TEMPLATE_ID`, and `SMS_TEMPLATE_ID` . The template should have no personalisation variables. + +``` +API_KEY=gcntfy-notAKey-f6c7cc49-b5b7-4e67-a8ff-24f34be34523-f6c7cc49-b5b7-4e67-a8ff-24f34be34523 +EMAIL_TEMPLATE_ID=f6c7cc49-b5b7-4e67-a8ff-24f34be34523 +SMS_TEMPLATE_ID=f6c7cc49-b5b7-4e67-aeef-24f34be34523 +``` +These can be in a `.env` file in the soak_test directory. + +__See Last Pass note "Soak Test Staging API Key and Template" in Shared-New-Notify-Staging folder__ + +Notes: +- The default configuration in `locust.conf` is to send one email per second. +- You can supply a `--ref` option to `soak_test_send_notification.py` that will set the notification's `client_reference`. This is useful in testing that all POSTs were processed successfully. +- You can also supply a `--sms` option that will sens sms instead of email. + +## How to run + +There are two ways to run Locust, with the UI or headless. + +### With the UI + +Locally you can run the email soak test with: + +```shell +locust -f ./soak_test_send_notification.py [--ref=soak-2023-05-30-A] [--sms] +``` + +Follow the localhost address that the console will display to get to the UI. It will ask you how many total users and spawned users you want configured. Once setup, you can manually start the tests via the UI and follow the summary data and charts visually. + +The server soak test can be run with + +```shell +locust -f ./soak_test_all_servers.py +``` + +### Headless, via the command line + +You can pass the necessary parameters to the command line to run in the headless mode. For example: + +```shell +locust -f ./soak_test_send_notification.py --headless [--ref=soak-2023-05-30-A] [--sms] +``` + +The defaults in `locust.conf` may be overridden by command line options + +The server soak test can be run with + +```shell +locust -f ./soak_test_all_servers.py --headless +``` + +## Checking if all notifications were sent + +To check whether all the POSTs from `soak_test_send_notification.py` made it into the database, run the "Soak test" query on blazer. 
The query is already in staging, or you can run: + +```sql +WITH +data as ( + select + n.created_at, n.sent_at, n.updated_at, client_reference, notification_status as status, t.process_type as priority + from notifications n join templates t on n.template_id = t.id + where client_reference like concat('%', 'soak-2023-05-30-A'::text, '%') +), +munged as ( + select *, + EXTRACT(epoch FROM updated_at - created_at) as total_time + from data +), +stats as ( + select + status, count(*), + percentile_cont(0.5) within group(order by total_time) AS total_median, + avg(total_time) as total_mean + from munged + group by status +) +select * from stats +``` + diff --git a/scripts/soak_test/locust.conf b/scripts/soak_test/locust.conf new file mode 100644 index 0000000000..44c0e7a6ad --- /dev/null +++ b/scripts/soak_test/locust.conf @@ -0,0 +1,3 @@ +users=1 +stop-timeout=10 +host=https://staging.notification.cdssandbox.xyz diff --git a/scripts/soak_test/setup.sh b/scripts/soak_test/setup.sh new file mode 100755 index 0000000000..d857a30830 --- /dev/null +++ b/scripts/soak_test/setup.sh @@ -0,0 +1,2 @@ +#!/bin/bash +pip install locust python-dotenv \ No newline at end of file diff --git a/scripts/soak_test/soak_test_all_servers.py b/scripts/soak_test/soak_test_all_servers.py new file mode 100644 index 0000000000..089ef07b77 --- /dev/null +++ b/scripts/soak_test/soak_test_all_servers.py @@ -0,0 +1,60 @@ +from dotenv import load_dotenv +from locust import HttpUser, TaskSet, constant_pacing, task +from locust.clients import HttpSession +from soak_utils import url_with_prefix + +load_dotenv() + + +class MultipleHostsUser(HttpUser): + abstract = True + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.admin_client = HttpSession( + base_url=self.host, request_event=self.client.request_event, user=self + ) + + self.api_client = HttpSession( + base_url=url_with_prefix(self.host, "api"), request_event=self.client.request_event, user=self + ) + + self.api_k8s_client = HttpSession( + base_url=url_with_prefix(self.host, "api-k8s"), request_event=self.client.request_event, user=self + ) + + self.dd_api_client = HttpSession( + base_url=url_with_prefix(self.host, "api.document"), request_event=self.client.request_event, user=self + ) + + self.documentation_client = HttpSession( + base_url=url_with_prefix(self.host, "documentation"), request_event=self.client.request_event, user=self + ) + + +class UserTasks(TaskSet): + @task + def test_admin(self): + self.user.admin_client.get("/_status?simple=true", name=f"{self.user.admin_client.base_url}/_status?simple=true") + + @task + def test_api(self): + self.user.api_client.get("/_status?status=true", name=f"{self.user.api_client.base_url}/_status?simple=true") + + @task + def test_api_k8s(self): + self.user.api_k8s_client.get("/_status?status=true", name=f"{self.user.api_k8s_client.base_url}/_status?simple=true") + + @task + def test_dd_api(self): + self.user.dd_api_client.get("/_status?simple=true", name=f"{self.user.dd_api_client.base_url}/_status?simple=true") + + @task + def test_documentation(self): + self.user.documentation_client.get("/", name=f"{self.user.documentation_client.base_url}/") + + +class WebsiteUser(MultipleHostsUser): + wait_time = constant_pacing(0.2) # 5 GETs a second, so each server every second on average + tasks = [UserTasks] diff --git a/scripts/soak_test/soak_test_send_notification.py b/scripts/soak_test/soak_test_send_notification.py new file mode 100644 index 0000000000..d3cab98765 --- /dev/null +++ 
b/scripts/soak_test/soak_test_send_notification.py @@ -0,0 +1,38 @@ +import os + +from dotenv import load_dotenv +from locust import HttpUser, constant_pacing, events, task +from soak_utils import url_with_prefix + +load_dotenv() + + +@events.init_command_line_parser.add_listener +def _(parser): + parser.add_argument("--ref", type=str, default="test", help="reference") + parser.add_argument("--sms", action='store_true', help="send sms") + + +class NotifyApiUser(HttpUser): + wait_time = constant_pacing(1) # each user makes one post per second + + def __init__(self, *args, **kwargs): + self.host = url_with_prefix(self.host, "api") + + super(NotifyApiUser, self).__init__(*args, **kwargs) + self.headers = {"Authorization": f"apikey-v1 {os.getenv('API_KEY')}"} + self.email_template = os.getenv("EMAIL_TEMPLATE_ID") + self.sms_template = os.getenv("SMS_TEMPLATE_ID") + self.email_address = "success@simulator.amazonses.com" + self.phone_number = "+16135550123" # INTERNAL_TEST_NUMBER + self.reference_id = self.environment.parsed_options.ref + self.send_sms = self.environment.parsed_options.sms + + @task(1) + def send_notification(self): + if self.send_sms: + json = {"phone_number": self.phone_number, "template_id": self.sms_template, "reference": self.reference_id} + self.client.post("/v2/notifications/sms", json=json, headers=self.headers) + else: + json = {"email_address": self.email_address, "template_id": self.email_template, "reference": self.reference_id} + self.client.post("/v2/notifications/email", json=json, headers=self.headers) diff --git a/scripts/soak_test/soak_utils.py b/scripts/soak_test/soak_utils.py new file mode 100644 index 0000000000..3a62cf115d --- /dev/null +++ b/scripts/soak_test/soak_utils.py @@ -0,0 +1,6 @@ +from urllib.parse import urlparse + + +def url_with_prefix(url: str, prefix: str) -> str: + parsed_url = urlparse(url) + return parsed_url._replace(netloc=f"{prefix}.{parsed_url.netloc}").geturl() diff --git a/tests-perf/README.md b/tests-perf/README.md new file mode 100644 index 0000000000..24fce4ee63 --- /dev/null +++ b/tests-perf/README.md @@ -0,0 +1,4 @@ +# GC Notify performance tests + +See subfolders for tests (and documentation) focusing on different aspects of GC Notify. + diff --git a/tests-perf/individual_emails/README.md b/tests-perf/individual_emails/README.md new file mode 100644 index 0000000000..6d5aca08c5 --- /dev/null +++ b/tests-perf/individual_emails/README.md @@ -0,0 +1,72 @@ +# Individual email stress test + +## Goals + +The goal of this code is to load test the API with individual emails. + +## How to configure + +Some test configuration is in the `locust.conf` file. + +The python file `individual-emails.py` requires these environment variables: +``` +PERF_TEST_AUTH_HEADER="apikey-v1 xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" +PERF_TEST_EMAIL_TEMPLATE_ID=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx +``` + +Note that `individual-emails.py` is configured to have each user send 1 email per second. + +You can supply a `--ref=test` option (defined in `individual-emails.py`) that will set a prefix for the notification's `client_reference`. This is useful in testing that all POSTs were processed successfully. + +Note that there are three tasks that can be run, `send_email()`, `send_email_with_file_attachment()`, and `send_email_with_5_large_file_attachments()`. Set the task weights as desired (including setting some to zero to not run that task); a short sketch of the weighting mechanism follows under "How to run" below. + +## How to run + +There are two ways to run Locust, with the UI or headless.
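+Either way, the mix of requests is controlled by the task weights mentioned above. As a rough sketch of the weighting mechanism (trimmed from `individual-emails.py`; task bodies omitted, and the weights shown are only an example):
+
+```python
+from locust import HttpUser, constant_pacing, task
+
+
+class NotifyApiUser(HttpUser):
+    wait_time = constant_pacing(1)  # each user runs one task per second
+
+    @task(1)  # relative weight: runs in proportion to the other non-zero weights
+    def send_email(self):
+        ...
+
+    @task(0)  # a weight of 0 removes the task from the schedule entirely
+    def send_email_with_file_attachment(self):
+        ...
+```
+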
+ +### With the UI + +Locally, simply run: + +```shell +poetry run locust -f ./individual-emails.py +``` + +Follow the localhost address that the console will display to get to the UI. It will ask you how many total users and spawned users you want configured. Once set up, you can manually start the tests via the UI and follow the summary data and charts visually. + +### Headless, via the command line + +You can pass the necessary parameters to the command line to run in the headless mode. For example: + +```shell +poetry run locust -f ./individual-emails.py --headless --stop-timeout=10 --host=https://api.staging.notification.cdssandbox.xyz --run-time=10m --users=5 --ref=load-test +``` + +You can also set many of these parameters in the *locust.conf* file. + +To check send times you can run a Blazer query such as: + +```sql +WITH + ref AS (VALUES ('load-test')), +email_initial_data as ( + select + n.created_at, n.sent_at, n.updated_at, client_reference, notification_status as status, t.process_type as priority + from notifications n join templates t on n.template_id = t.id + where client_reference like concat('%', (table ref), '%') +), +email_data as ( + select *, + EXTRACT(epoch FROM updated_at - created_at) as total_time + from email_initial_data +), +email_stats as ( + select + status, count(*), + percentile_cont(0.5) within group(order by total_time) AS total_median, + avg(total_time) as total_mean + from email_data + group by status +) +select * from email_stats +``` diff --git a/tests-perf/individual_emails/individual-emails.py b/tests-perf/individual_emails/individual-emails.py new file mode 100644 index 0000000000..b1b0698d74 --- /dev/null +++ b/tests-perf/individual_emails/individual-emails.py @@ -0,0 +1,68 @@ +import os +from base64 import b64encode + +from dotenv import load_dotenv +from locust import HttpUser, constant_pacing, events, task + +load_dotenv() + + +@events.init_command_line_parser.add_listener +def _(parser): + parser.add_argument("--ref", type=str, default="test", help="Prefix for reference") + + +class NotifyApiUser(HttpUser): + wait_time = constant_pacing(1) # each user makes one post per second + + def __init__(self, *args, **kwargs): + super(NotifyApiUser, self).__init__(*args, **kwargs) + self.headers = {"Authorization": os.getenv("PERF_TEST_AUTH_HEADER")} + self.email_address = os.getenv("PERF_TEST_EMAIL", "success@simulator.amazonses.com") + self.email_template = os.getenv("PERF_TEST_EMAIL_TEMPLATE_ID") + + @task(1) + def send_email(self): + reference_id = self.environment.parsed_options.ref + json = {"email_address": self.email_address, "template_id": self.email_template, "reference": reference_id} + self.client.post("/v2/notifications/email", json=json, headers=self.headers) + + @task(0) + def send_email_with_file_attachment(self): + reference_id = self.environment.parsed_options.ref + json = { + "email_address": self.email_address, + "template_id": self.email_template, + "reference": reference_id, + "personalisation": { + "attached_file": { + "file": "SGVsbG8gdGhlcmUgaG93IGFyZSB5b3U=", + "filename": "test_file.txt", + "sending_method": "attach", + } + }, + } + self.client.post("/v2/notifications/email", json=json, headers=self.headers) + + @task(0) + def send_email_with_5_large_file_attachments(self): + reference_id = self.environment.parsed_options.ref + personalisation = {} + file_length = 900000 # 900 KB each + + for i in range(5): + data = f"{i}" * file_length + base64_bytes = b64encode(data.encode()) + file = base64_bytes.decode("utf-8") +
personalisation[f"attached_file{i}"] = { + "file": file, + "filename": "test_file.txt", + "sending_method": "attach", + } + json = { + "email_address": self.email_address, + "template_id": self.email_template, + "reference": reference_id, + "personalisation": personalisation, + } + self.client.post("/v2/notifications/email", json=json, headers=self.headers) diff --git a/tests-perf/individual_emails/locust.conf b/tests-perf/individual_emails/locust.conf new file mode 100644 index 0000000000..6de0e19816 --- /dev/null +++ b/tests-perf/individual_emails/locust.conf @@ -0,0 +1,2 @@ +spawn-rate = 100 +run-time = 10m diff --git a/tests-perf/locust/README.md b/tests-perf/locust/README.md index 53f38645bf..afe4c639f0 100644 --- a/tests-perf/locust/README.md +++ b/tests-perf/locust/README.md @@ -36,6 +36,17 @@ You should not have to modify the configuration to run the stress-tests locally. There are two ways to run Locust, with the UI or headless. +### Add the following to your .env file (see 1Password): + +``` +PERF_TEST_AUTH_HEADER = +PERF_TEST_BULK_EMAIL_TEMPLATE_ID= +PERF_TEST_EMAIL_WITH_LINK_TEMPLATE_ID= +PERF_TEST_EMAIL_TEMPLATE_ID= +PERF_TEST_EMAIL_WITH_ATTACHMENT_TEMPLATE_ID= +PERF_TEST_SMS_TEMPLATE_ID = +``` + ### With the UI Locally, simply run: @@ -56,6 +67,13 @@ locust -f .\locust-notifications.py --headless --users=5500 --spawn-rate=200 --r You can also modify the *locust.config* file to enable the headless mode and define the necessary users, spawn rate and run time. +## Email send rate test + +We also max out the email send rate by sending 2000 x 5 emails per minute for 10 minutes. This can be run manually with the command +``` +locust --headless --host https://api.staging.notification.cdssandbox.xyz --locustfile tests-perf/locust/send_rate_email.py --users 5 --run-time 10m --spawn-rate 1 +``` + ### Performance Testing on AWS We run Notify performance tests on a daily manner through AWS ECS tasks diff --git a/tests-perf/locust/locust-notifications-with-fail.py b/tests-perf/locust/locust-notifications-with-fail.py new file mode 100644 index 0000000000..e030396afb --- /dev/null +++ b/tests-perf/locust/locust-notifications-with-fail.py @@ -0,0 +1,83 @@ +""" locust-notifications-with-bounce.py + isort:skip_file +""" +# flake8: noqa + +import os +import random +from dataclasses import make_dataclass + +from dotenv import load_dotenv +from locust import HttpUser, constant_pacing, task + +load_dotenv() +NotifyApiUserTemplateGroup = make_dataclass('NotifyApiUserTemplateGroup', [ + 'email_id', + 'email_with_attachment_id', + 'email_with_link_id', +]) + + +class NotifyApiUser(HttpUser): + + wait_time = constant_pacing(60) + host = os.getenv("PERF_TEST_DOMAIN", "https://api.staging.notification.cdssandbox.xyz") + + def __init__(self, *args, **kwargs): + super(NotifyApiUser, self).__init__(*args, **kwargs) + + self.headers = {"Authorization": os.getenv("PERF_TEST_AUTH_HEADER")} + self.fail_rate = float(os.getenv("PERF_TEST_FAIL_RATE", "0.1")) + self.email_success = os.getenv("PERF_TEST_EMAIL_SUCCESS", "success@simulator.amazonses.com") + self.template_group = NotifyApiUserTemplateGroup( + email_id=os.getenv("PERF_TEST_EMAIL_TEMPLATE_ID"), + email_with_attachment_id=os.getenv("PERF_TEST_EMAIL_WITH_ATTACHMENT_TEMPLATE_ID"), + email_with_link_id=os.getenv("PERF_TEST_EMAIL_WITH_LINK_TEMPLATE_ID"), + ) + + @task(16) + def send_email_notifications(self): + json = self.__email_json(self.template_group.email_id) + + self.client.post("/v2/notifications/email", json=json, headers=self.headers) + + @task(2) 
+ def send_email_with_attachment_notifications(self): + personalisation = { + "attached_file": { + "file": "Q29udGVudCBvZiBBdHRhY2hlZCBmaWxl", + "filename": "attached_file.txt", + "sending_method": "attach", + } + } + json = self.__email_json(self.template_group.email_with_attachment_id, personalisation) + + self.client.post("/v2/notifications/email", json=json, headers=self.headers) + + @task(2) + def send_email_with_link_notifications(self): + personalisation = { + "application_file": { + "file": "Q29udGVudCBvZiBBdHRhY2hlZCBmaWxl", + "filename": "attached_file.txt", + "sending_method": "link", + } + } + json = self.__email_json(self.template_group.email_with_link_id, personalisation) + + self.client.post("/v2/notifications/email", json=json, headers=self.headers) + + def __email_json(self, template_id, personalisation={}): + email_invalid = [ + "complaint@simulator.amazonses.com", + "bounce@simulator.amazonses.com", + "ooto@simulator.amazonses.com", + "blacklist@simulator.amazonses.com" + ] + email_index = random.randint(0, len(email_invalid) - 1) + email = email_invalid[email_index] if random.random() <= self.fail_rate else self.email_success + return { + "email_address": email, + "template_id": template_id, + "personalisation": personalisation, + } diff --git a/tests-perf/locust/locust.conf b/tests-perf/locust/locust.conf index c1eba3b220..76aa3d2273 100644 --- a/tests-perf/locust/locust.conf +++ b/tests-perf/locust/locust.conf @@ -3,7 +3,7 @@ locustfile = tests-perf/locust/locust-notifications.py host = https://api.staging.notification.cdssandbox.xyz users = 3000 spawn-rate = 20 -run-time = 5m +run-time = 10m # headless = true # master = true diff --git a/tests-perf/locust/send_rate_email.py b/tests-perf/locust/send_rate_email.py new file mode 100644 index 0000000000..a2a26fef73 --- /dev/null +++ b/tests-perf/locust/send_rate_email.py @@ -0,0 +1,60 @@ +""" send_rate_email.py + isort:skip_file +""" +# flake8: noqa + +BULK_EMAIL_SIZE = 2000 + +import os +import sys +from datetime import datetime +from dataclasses import make_dataclass + +sys.path.append(os.path.abspath(os.path.join("..", "tests_smoke"))) + +from dotenv import load_dotenv +from locust import HttpUser, constant_pacing, task +from tests_smoke.smoke.common import job_line, rows_to_csv # type: ignore + +load_dotenv() +NotifyApiUserTemplateGroup = make_dataclass('NotifyApiUserTemplateGroup', [ + 'bulk_email_id', + 'email_id', + 'email_with_attachment_id', + 'email_with_link_id', + 'sms_id', +]) + + +class NotifyApiUser(HttpUser): + + wait_time = constant_pacing(60) # 60 seconds between each task + host = os.getenv("PERF_TEST_DOMAIN", "https://api.staging.notification.cdssandbox.xyz") + + def __init__(self, *args, **kwargs): + super(NotifyApiUser, self).__init__(*args, **kwargs) + + self.headers = {"Authorization": os.getenv("PERF_TEST_AUTH_HEADER")} + self.email = os.getenv("PERF_TEST_EMAIL", "success@simulator.amazonses.com") + self.phone_number = os.getenv("PERF_TEST_PHONE_NUMBER", "16135550123") + self.template_group = NotifyApiUserTemplateGroup( + bulk_email_id=os.getenv("PERF_TEST_BULK_EMAIL_TEMPLATE_ID"), + email_id=os.getenv("PERF_TEST_EMAIL_TEMPLATE_ID"), + email_with_attachment_id=os.getenv("PERF_TEST_EMAIL_WITH_ATTACHMENT_TEMPLATE_ID"), + email_with_link_id=os.getenv("PERF_TEST_EMAIL_WITH_LINK_TEMPLATE_ID"), + sms_id=os.getenv("PERF_TEST_SMS_TEMPLATE_ID"), + ) + + @task(1) + def send_bulk_email_notifications(self): + """ + Send BULK_EMAIL_SIZE emails through the /bulk endpoint + """ + + json = { + "name": f"Send 
rate test {datetime.utcnow().isoformat()}", + "template_id": self.template_group.bulk_email_id, + "csv": rows_to_csv([["email address", "application_file"], *job_line(self.email, BULK_EMAIL_SIZE)]) + } + + self.client.post("/v2/notifications/bulk", json=json, headers=self.headers) diff --git a/tests-perf/ops/Dockerfile b/tests-perf/ops/Dockerfile index 40bcefe4c2..ba36582848 100644 --- a/tests-perf/ops/Dockerfile +++ b/tests-perf/ops/Dockerfile @@ -1,20 +1,35 @@ -FROM python:3.9-alpine3.13 +FROM python:3.10-alpine3.16@sha256:afe68972cc00883d70b3760ee0ffbb7375cf09706c122dda7063ffe64c5be21b ENV PYTHONDONTWRITEBYTECODE 1 +ENV POETRY_VERSION "1.7.1" +ENV APP_VENV="/app/.venv" +ENV POETRY_HOME="/opt/poetry" +ENV POETRY_VERSION="1.7.1" +ENV POETRY_VIRTUALENVS_CREATE="false" +ENV PATH="${APP_VENV}/bin:${POETRY_HOME}/bin:$PATH" RUN apk add --no-cache bash build-base git libtool cmake autoconf automake gcc musl-dev postgresql-dev g++ libexecinfo-dev make libffi-dev libmagic libcurl curl-dev rust cargo && rm -rf /var/cache/apk/* -# update pip -RUN python -m pip install wheel -RUN python -m pip install --upgrade pip - RUN set -ex && mkdir /app - WORKDIR /app -COPY . /app +# Install poetry and isolate it in its own venv +RUN python -m venv ${POETRY_HOME} \ + && ${POETRY_HOME}/bin/pip3 install poetry==${POETRY_VERSION} + +COPY pyproject.toml poetry.lock /app/ + +RUN python -m venv ${APP_VENV} \ + && . ${APP_VENV}/bin/activate \ + && poetry install \ + && poetry add wheel + +COPY . /app/ + +RUN . ${APP_VENV}/bin/activate \ + && make generate-version-file + -RUN set -ex && pip3 install -r requirements_for_test.txt RUN echo "fs.file-max = 100000" >> /etc/sysctl.conf ENTRYPOINT [ "bin/execute_and_publish_performance_test.sh" ] diff --git a/tests-perf/priorities/.env.example b/tests-perf/priorities/.env.example new file mode 100644 index 0000000000..07afef1272 --- /dev/null +++ b/tests-perf/priorities/.env.example @@ -0,0 +1,12 @@ +API_HOST_NAME= +API_KEY= + +NORMAL_EMAIL_TEMPLATE= +PRIORITY_EMAIL_TEMPLATE= +BULK_EMAIL_TEMPLATE= + +PRIORITY_SMS_TEMPLATE= +NORMAL_SMS_TEMPLATE= +BULK_SMS_TEMPLATE= + +JOB_SIZE= diff --git a/tests-perf/priorities/README.md b/tests-perf/priorities/README.md new file mode 100644 index 0000000000..a20f1e966d --- /dev/null +++ b/tests-perf/priorities/README.md @@ -0,0 +1,130 @@ +# Computing total time for notifications + +Make a .env file. Copy-paste the env from LastPass (Staging / priority lanes perf test). + +## Running the tests + +Note first that there are a few sensible defaults set in `locust.conf`. In particular, to run with the GUI you will have to override the `headless = true` setting. + +### Posts to /email + +`tasks_individual_emails.py` will run 20 times as many bulk priority messages as high priority. +You can see and change the different weightings in `tasks_individual_emails.py`. + +In its current state, 37 users send around 2000 emails a minute, and 67 will send around 4000 (each user POSTs roughly once per second, so N users send roughly 60 x N emails a minute). +Run both tests to see what high and normal load does to the application. + +``` +locust -f ./tasks_individual_emails.py --run-time=10m --users=37 --ref=perf0524z-email +``` +will POST approximately 2000 bulk emails per minute and 100 priority emails per minute for 10 minutes. + +### Posts to /bulk + +`tasks_bulk_endpoint.py` will POST a file to `/bulk` of size `JOB_SIZE` (default 10) every 10 seconds (per user). Best to run it with one user, and terminate after the desired number of POSTs have gone through.
+ +``` +locust -f ./tasks_bulk_endpoint.py --run-time=15s --users=1 --ref=perf0524z-bulk +``` +will POST to `/bulk` twice + +The tests add the current time to the notification's `reference` or the job's `name` when making the POST. We can use that to compute the total time from POST to delivery receipt: + +If you use the suffixes "-email" and "-bulk" for your reference / name then you can get statistics using the following Hasura query: + +```sql +WITH + ref AS (VALUES ('perf0524z')), + +bulk_initial_data as ( + select + to_timestamp(split_part(j.original_file_name, ' ', 1), 'YYYY-MM-DD HH24:MI:SS.US') as posted_at, + n.created_at, n.sent_at, n.updated_at, notification_status as status, t.process_type as priority + from notifications n join templates t on n.template_id = t.id + join jobs j on n.job_id = j.id + where j.original_file_name like concat('%', (table ref), '-bulk%') +), +bulk_data as ( + select *, + EXTRACT(epoch FROM updated_at - posted_at) as total_time, + EXTRACT(epoch FROM created_at - posted_at) as redis_time, + EXTRACT(epoch FROM sent_at - created_at) as processing_time, + EXTRACT(epoch FROM updated_at - sent_at) as delivery_time + from bulk_initial_data +), +bulk_stats as ( + select + '/bulk' endpoint, + status, priority, count(*), + percentile_cont(0.5) within group(order by redis_time) AS redis_median, + percentile_cont(0.5) within group(order by processing_time) AS processing_median, + percentile_cont(0.5) within group(order by delivery_time) AS delivery_median + from bulk_data + group by priority, status +), +email_initial_data as ( + select + to_timestamp(split_part(client_reference, ' ', 1), 'YYYY-MM-DD HH24:MI:SS.US') as posted_at, + n.created_at, n.sent_at, n.updated_at, client_reference, notification_status as status, t.process_type as priority + from notifications n join templates t on n.template_id = t.id + where client_reference like concat('%', (table ref), '-email%') +), +email_data as ( + select *, + EXTRACT(epoch FROM updated_at - posted_at) as total_time, + EXTRACT(epoch FROM created_at - posted_at) as redis_time, + EXTRACT(epoch FROM sent_at - created_at) as processing_time, + EXTRACT(epoch FROM updated_at - sent_at) as delivery_time + from email_initial_data +), +email_stats as ( + select + '/email' endpoint, + status, priority, count(*), + percentile_cont(0.5) within group(order by redis_time) AS redis_median, + percentile_cont(0.5) within group(order by processing_time) AS processing_median, + percentile_cont(0.5) within group(order by delivery_time) AS delivery_median + from email_data + group by priority, status +) +select * from email_stats +union all +select * from bulk_stats +``` + +### Posts to /sms + +Similarly you can test sms with a command similar to +``` +locust -f ./tasks_individual_sms.py --run-time=10m --users=20 --ref=perf_sms_0112-aa +``` +To see the timings, run the SQL +```sql +WITH + ref AS (VALUES ('perf_sms_0112-aa')), + +initial_data as ( + select + n.created_at, n.sent_at, n.updated_at, client_reference, notification_status as status, t.process_type as priority + from notifications n join templates t on n.template_id = t.id + where client_reference like concat('%', (table ref), '%') +), +data as ( + select *, + EXTRACT(epoch FROM updated_at - created_at) as total_time, + EXTRACT(epoch FROM sent_at - created_at) as processing_time, + EXTRACT(epoch FROM updated_at - sent_at) as delivery_time + from initial_data +), +stats as ( + select + '/sms' endpoint, + status, priority, count(*), + percentile_cont(0.5) within group(order by 
total_time) AS total_median, + percentile_cont(0.5) within group(order by processing_time) AS processing_median, + percentile_cont(0.5) within group(order by delivery_time) AS delivery_median + from data + group by priority, status +) +select * from stats +``` diff --git a/tests-perf/priorities/config.py b/tests-perf/priorities/config.py new file mode 100644 index 0000000000..9718656ae8 --- /dev/null +++ b/tests-perf/priorities/config.py @@ -0,0 +1,30 @@ +import os + +from dotenv import load_dotenv + +load_dotenv() + + +class Config: + API_HOST_NAME = os.environ.get("API_HOST_NAME") + API_KEY = os.environ.get("API_KEY") + + EMAIL_TO = os.environ.get("EMAIL_TO", "success@simulator.amazonses.com") + BULK_EMAIL_TEMPLATE = os.environ.get("BULK_EMAIL_TEMPLATE") + NORMAL_EMAIL_TEMPLATE = os.environ.get("NORMAL_EMAIL_TEMPLATE") + PRIORITY_EMAIL_TEMPLATE = os.environ.get("PRIORITY_EMAIL_TEMPLATE") + + SMS_TO = os.environ.get("SMS_TO", "+16135550123") # Notify internal test number + BULK_SMS_TEMPLATE = os.environ.get("BULK_SMS_TEMPLATE") + NORMAL_SMS_TEMPLATE = os.environ.get("NORMAL_SMS_TEMPLATE") + PRIORITY_SMS_TEMPLATE = os.environ.get("PRIORITY_SMS_TEMPLATE") + + JOB_SIZE = int(os.environ.get("JOB_SIZE", "10")) + + @classmethod + def validate(cls): + for x in ["API_KEY", "API_HOST_NAME", "EMAIL_TO", "SMS_TO", "JOB_SIZE"]: + assert getattr(cls, x), f"Need {x}" + + +Config.validate() diff --git a/tests-perf/priorities/locust.conf b/tests-perf/priorities/locust.conf new file mode 100644 index 0000000000..8f34a41ad2 --- /dev/null +++ b/tests-perf/priorities/locust.conf @@ -0,0 +1,3 @@ +headless = true +spawn-rate = 20 +stop-timeout = 30 diff --git a/tests-perf/priorities/locust_setup.py b/tests-perf/priorities/locust_setup.py new file mode 100644 index 0000000000..0c0fc63e5a --- /dev/null +++ b/tests-perf/priorities/locust_setup.py @@ -0,0 +1,6 @@ +from locust import events + + +@events.init_command_line_parser.add_listener +def init_parser(parser): + parser.add_argument("--ref", type=str, default="perf test", help="reference string for database search") diff --git a/tests-perf/priorities/tasks_bulk_endpoint.py b/tests-perf/priorities/tasks_bulk_endpoint.py new file mode 100644 index 0000000000..4391462c04 --- /dev/null +++ b/tests-perf/priorities/tasks_bulk_endpoint.py @@ -0,0 +1,28 @@ +from datetime import datetime + +import locust_setup # noqa: F401 - this file configures locust +from locust import HttpUser, constant_pacing, task + +from config import Config +from utils import api_headers, job_line, rows_to_csv + +""" +Usage: +runs the bulk upload twice + +locust -f ./locust_bulk_endpoint.py --headless --stop-timeout=30 --run-time=15s --host=https://api-k8s.staging.notification.cdssandbox.xyz --users=1 --html=locust.html +""" + + +class ApiUser(HttpUser): + wait_time = constant_pacing(10) # run once every 10 second + host = Config.API_HOST_NAME + + @task + def send_bulk(self): + json = { + "name": f"{datetime.utcnow().isoformat()} {self.environment.parsed_options.ref}", + "template_id": Config.BULK_EMAIL_TEMPLATE, + "csv": rows_to_csv([["email address"], *job_line(Config.EMAIL_TO, Config.JOB_SIZE)]), + } + self.client.post("/v2/notifications/bulk", json=json, headers=api_headers(Config.API_KEY)) diff --git a/tests-perf/priorities/tasks_individual_emails.py b/tests-perf/priorities/tasks_individual_emails.py new file mode 100644 index 0000000000..946a87babd --- /dev/null +++ b/tests-perf/priorities/tasks_individual_emails.py @@ -0,0 +1,42 @@ +import random +import time + +import locust_setup # 
noqa: F401 - this file configures locust +from locust import HttpUser, constant_throughput, task + +from config import Config +from utils import api_headers, json_data + +# for 4000 requests per minute, set number of users to be 4000 / 60 = 67 + + +class ApiUser(HttpUser): + wait_time = constant_throughput(1) # run once every second + host = Config.API_HOST_NAME + + @task(75) + def send_bulk_email(self): + time.sleep(random.random()) # prevent users from POSTing at the same time + self.client.post( + "/v2/notifications/email", + json=json_data(Config.EMAIL_TO, Config.BULK_EMAIL_TEMPLATE, self.environment.parsed_options.ref), + headers=api_headers(Config.API_KEY), + ) + + @task(20) + def send_normal_email(self): + time.sleep(random.random()) # prevent users from POSTing at the same time + self.client.post( + "/v2/notifications/email", + json=json_data(Config.EMAIL_TO, Config.NORMAL_EMAIL_TEMPLATE, self.environment.parsed_options.ref), + headers=api_headers(Config.API_KEY), + ) + + @task(5) + def send_priority_email(self): + time.sleep(random.random()) # prevent users from POSTing at the same time + self.client.post( + "/v2/notifications/email", + json=json_data(Config.EMAIL_TO, Config.PRIORITY_EMAIL_TEMPLATE, self.environment.parsed_options.ref), + headers=api_headers(Config.API_KEY), + ) diff --git a/tests-perf/priorities/tasks_individual_sms.py b/tests-perf/priorities/tasks_individual_sms.py new file mode 100644 index 0000000000..14e7f05efb --- /dev/null +++ b/tests-perf/priorities/tasks_individual_sms.py @@ -0,0 +1,42 @@ +import random +import time + +import locust_setup # noqa: F401 - this file configures locust +from locust import HttpUser, constant_throughput, task + +from config import Config +from utils import api_headers, json_data_sms + +# for 4000 requests per minute, set number of users to be 4000 / 60 = 67 + + +class ApiUser(HttpUser): + wait_time = constant_throughput(1) # run once every second + host = Config.API_HOST_NAME + + @task(80) + def send_bulk_sms(self): + time.sleep(random.random()) # prevent users from POSTing at the same time + self.client.post( + "/v2/notifications/sms", + json=json_data_sms(Config.SMS_TO, Config.BULK_SMS_TEMPLATE, self.environment.parsed_options.ref), + headers=api_headers(Config.API_KEY), + ) + + @task(15) + def send_normal_sms(self): + time.sleep(random.random()) # prevent users from POSTing at the same time + self.client.post( + "/v2/notifications/sms", + json=json_data_sms(Config.SMS_TO, Config.NORMAL_SMS_TEMPLATE, self.environment.parsed_options.ref), + headers=api_headers(Config.API_KEY), + ) + + @task(5) + def send_priority_sms(self): + time.sleep(random.random()) # prevent users from POSTing at the same time + self.client.post( + "/v2/notifications/sms", + json=json_data_sms(Config.SMS_TO, Config.PRIORITY_SMS_TEMPLATE, self.environment.parsed_options.ref), + headers=api_headers(Config.API_KEY), + ) diff --git a/tests-perf/priorities/utils.py b/tests-perf/priorities/utils.py new file mode 100644 index 0000000000..cc1d28f655 --- /dev/null +++ b/tests-perf/priorities/utils.py @@ -0,0 +1,37 @@ +import csv +from datetime import datetime +from io import StringIO +from typing import Iterator, List, Optional + + +def api_headers(api_key: str): + return {"Authorization": f"ApiKey-v1 {api_key[-36:]}"} + + +def json_data(email_address: str, template_id: str, ref: str, personalisation: Optional[dict] = {}): + return { + "reference": f"{datetime.utcnow().isoformat()} {ref}", + "email_address": email_address, + "template_id": template_id, + 
"personalisation": personalisation, + } + + +def json_data_sms(phone_number: str, template_id: str, ref: str, personalisation: Optional[dict] = {}): + return { + "reference": f"{datetime.utcnow().isoformat()} {ref}", + "phone_number": phone_number, + "template_id": template_id, + "personalisation": personalisation, + } + + +def rows_to_csv(rows: List[List[str]]): + output = StringIO() + writer = csv.writer(output) + writer.writerows(rows) + return output.getvalue() + + +def job_line(data: str, number_of_lines: int) -> Iterator[List[str]]: + return map(lambda n: [data, f"var{n}"], range(0, number_of_lines)) diff --git a/tests-perf/taurus/.gitignore b/tests-perf/taurus/.gitignore deleted file mode 100644 index dac817135a..0000000000 --- a/tests-perf/taurus/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -# Ignore secret file -config.properties -notifications-results.csv \ No newline at end of file diff --git a/tests-perf/taurus/TestSendNotification.jmx b/tests-perf/taurus/TestSendNotification.jmx deleted file mode 100644 index d3d71a6855..0000000000 --- a/tests-perf/taurus/TestSendNotification.jmx +++ /dev/null @@ -1,157 +0,0 @@ - - - - - - false - true - false - - - - - - - - - - API_KEY - ${__P(TEST_API_KEY,)} - = - - - HOST - ${__P(TEST_HOST,)} - = - - - RUNTIME - ${__P(RUNTIME,30)} - = - - - USERS_SEC - ${__P(USERS_PER_SEC,20)} - = - - - - - - continue - - false - 1 - - 1000 - 10 - true - ${RUNTIME} - - true - - - - true - - - - false - { - "email_address": "success@simulator.amazonses.com", - "template_id": "9c17633c-126a-4ad3-ad2f-b14c3a85314a", - "personalisation": { - "colour": "Fulvous" - } -} - = - - - - ${HOST} - - https - - /v2/notifications/email - POST - true - false - true - false - true - - - - - - - - - Authorization - ${API_KEY} - - - Content-Type - application/json - - - - - - - 201 - - - Assertion.response_code - false - 8 - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - false - true - false - false - false - true - 0 - true - true - true - true - true - true - - - notifications-results.csv - - - - - Will keep a constant throughput per minute, based on $USERS_SEC - 2 - ${__jexl3(${USERS_SEC}*60)} - - - - - - diff --git a/tests-perf/taurus/run-jmeter.ps1 b/tests-perf/taurus/run-jmeter.ps1 deleted file mode 100644 index 33c58f6371..0000000000 --- a/tests-perf/taurus/run-jmeter.ps1 +++ /dev/null @@ -1 +0,0 @@ -jmeter -n -t '.\TestSendNotification.jmx' -j test.log -q config.properties \ No newline at end of file diff --git a/tests-perf/taurus/run-jmeter.sh b/tests-perf/taurus/run-jmeter.sh deleted file mode 100644 index e2f052cb34..0000000000 --- a/tests-perf/taurus/run-jmeter.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -jmeter -n -t './TestSendNotification.jmx' -j test.log -q config.properties \ No newline at end of file diff --git a/tests-perf/waf-rules/README.md b/tests-perf/waf-rules/README.md new file mode 100644 index 0000000000..4e6cbd68d4 --- /dev/null +++ b/tests-perf/waf-rules/README.md @@ -0,0 +1,33 @@ +# WAF rules overload tests + +## Goals + +Triggers blocking WAF rules with many users hitting sensitive endpoints many times. + +## How to configure + +There aren't much configuration options for these Locust tests. You can simply run these. + +## How to run the WAF tests + +There are two ways to run Locust, with the UI or headless. 
+ +### With the UI + +Locally, simply run from the project's root: + +```shell +locust -f ./tests-perf/waf-rules/locust-trigger-rate-limit.py +``` + +Follow the localhost address that the console will display to get to the UI. It will ask you how many total users and spawned users you want configured. Once setup, you can manually start the tests via the UI and follow the summary data and charts visually. + +### Headless, via the command line + +You can pass the necessary parameters to the command line to run in the headless mode. For example: + +```shell +locust -f ./tests-perf/waf-rules/locust-trigger-rate-limit.py --headless --stop-timeout=10 --users=5 --html=waf-block.html +``` + +You can also set many of these parameters in the *locust.conf* file. diff --git a/tests-perf/waf-rules/locust-trigger-rate-limit-admin.py b/tests-perf/waf-rules/locust-trigger-rate-limit-admin.py new file mode 100644 index 0000000000..c5e51d85ef --- /dev/null +++ b/tests-perf/waf-rules/locust-trigger-rate-limit-admin.py @@ -0,0 +1,43 @@ +"""locust-trigger-rate-limit-admin.py + +Trigger rate limit on our WAF rules for the admin website +on the following endpoints: + +* Sign-in +* Register +* forgot password +* forced password reset + +Once the necessary rate limit has been attained, the +tests will start to fail as expected. +""" +# flake8: noqa + +from locust import HttpUser, constant_pacing, task + + +class NotifyAdminUser(HttpUser): + + host = "https://notification.canada.ca" + spawn_rate = 10 + wait_time = constant_pacing(1) + + def __init__(self, *args, **kwargs): + super(NotifyAdminUser, self).__init__(*args, **kwargs) + self.headers = {} + + @task(1) + def trigger_signin_block(self): + self.client.get("/sign-in", headers=self.headers) + + @task(1) + def trigger_register_block(self): + self.client.get("/register", headers=self.headers) + + @task(1) + def trigger_forgot_pw_block(self): + self.client.get("/forgot-password", headers=self.headers) + + @task(1) + def trigger_forced_pw_reset_block(self): + self.client.get("/forced-password-reset", headers=self.headers) diff --git a/tests-perf/waf-rules/locust-trigger-rate-limit-documentation.py b/tests-perf/waf-rules/locust-trigger-rate-limit-documentation.py new file mode 100644 index 0000000000..a4a4c00319 --- /dev/null +++ b/tests-perf/waf-rules/locust-trigger-rate-limit-documentation.py @@ -0,0 +1,28 @@ +"""locust-trigger-rate-limit-documentation.py + +Trigger rate limit on our WAF rules for the documentation website +on the following endpoints: + +* / + +Once the necessary rate limit has been attained, the +tests will start to fail as expected. 
+""" +# flake8: noqa + +from locust import HttpUser, constant_pacing, task + + +class NotifyDocumentationUser(HttpUser): + + host = "https://documentation.notification.canada.ca/" + spawn_rate = 10 + wait_time = constant_pacing(1) + + def __init__(self, *args, **kwargs): + super(NotifyDocumentationUser, self).__init__(*args, **kwargs) + self.headers = {} + + @task(1) + def trigger_home_block(self): + self.client.get("/", headers=self.headers) diff --git a/tests-perf/waf-rules/locust.conf b/tests-perf/waf-rules/locust.conf new file mode 100644 index 0000000000..5d1d12df70 --- /dev/null +++ b/tests-perf/waf-rules/locust.conf @@ -0,0 +1,2 @@ +spawn-rate = 10 +run-time = 20s diff --git a/tests/__init__.py b/tests/__init__.py index a1debdd999..5cf5c9313d 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -34,6 +34,14 @@ def create_authorization_header(service_id=None, key_type=KEY_TYPE_NORMAL): return "Authorization", "Bearer {}".format(token) +def create_sre_authorization_header(): + client_id = current_app.config["SRE_USER_NAME"] + secret = current_app.config["SRE_CLIENT_SECRET"] + + token = create_jwt_token(secret=secret, client_id=client_id) + return "Authorization", "Bearer {}".format(token) + + def unwrap_function(fn): """ Given a function, returns its undecorated original. diff --git a/tests/app/accept_invite/test_accept_invite_rest.py b/tests/app/accept_invite/test_accept_invite_rest.py index b05630af38..5e7eef097e 100644 --- a/tests/app/accept_invite/test_accept_invite_rest.py +++ b/tests/app/accept_invite/test_accept_invite_rest.py @@ -14,7 +14,6 @@ def test_validate_invitation_token_for_expired_token_returns_400(client, invitat token = generate_token( str(uuid.uuid4()), current_app.config["SECRET_KEY"], - current_app.config["DANGEROUS_SALT"], ) url = "/invite/{}/{}".format(invitation_type, token) auth_header = create_authorization_header() @@ -35,7 +34,6 @@ def test_validate_invitation_token_returns_200_when_token_valid( token = generate_token( str(invited_user.id), current_app.config["SECRET_KEY"], - current_app.config["DANGEROUS_SALT"], ) url = "/invite/{}/{}".format(invitation_type, token) auth_header = create_authorization_header() @@ -60,7 +58,6 @@ def test_validate_invitation_token_returns_400_when_invited_user_does_not_exist( token = generate_token( str(uuid.uuid4()), current_app.config["SECRET_KEY"], - current_app.config["DANGEROUS_SALT"], ) url = "/invite/{}/{}".format(invitation_type, token) auth_header = create_authorization_header() @@ -77,7 +74,6 @@ def test_validate_invitation_token_returns_400_when_token_is_malformed(client, i token = generate_token( str(uuid.uuid4()), current_app.config["SECRET_KEY"], - current_app.config["DANGEROUS_SALT"], )[:-2] url = "/invite/{}/{}".format(invitation_type, token) diff --git a/tests/app/api_key/test_rest.py b/tests/app/api_key/test_rest.py index 47637b8b99..d236f8cd8b 100644 --- a/tests/app/api_key/test_rest.py +++ b/tests/app/api_key/test_rest.py @@ -1,7 +1,9 @@ -from datetime import datetime +import pytest +from flask import url_for -from app import DATETIME_FORMAT +from app.dao.api_key_dao import get_api_key_by_secret, get_unsigned_secret from app.models import KEY_TYPE_NORMAL +from tests import create_sre_authorization_header from tests.app.db import ( create_api_key, create_notification, @@ -12,7 +14,6 @@ def test_get_api_key_stats_with_sends(admin_request, notify_db, notify_db_session): - service = create_service(service_name="Service 1") api_key = create_api_key(service) template = create_template(service=service, 
template_type="email") @@ -29,15 +30,8 @@ def test_get_api_key_stats_with_sends(admin_request, notify_db, notify_db_sessio assert api_key_stats["sms_sends"] == 0 assert api_key_stats["total_sends"] == total_sends - # the following lines test that a send has occurred within the last second - last_send_dt = datetime.strptime(api_key_stats["last_send"], DATETIME_FORMAT) - now = datetime.utcnow() - time_delta = now - last_send_dt - assert abs(time_delta.total_seconds()) < 1 - def test_get_api_key_stats_no_sends(admin_request, notify_db, notify_db_session): - service = create_service(service_name="Service 2") api_key = create_api_key(service) @@ -51,7 +45,6 @@ def test_get_api_key_stats_no_sends(admin_request, notify_db, notify_db_session) def test_get_api_keys_ranked(admin_request, notify_db, notify_db_session): - service = create_service(service_name="Service 1") api_key_1 = create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name="Key 1") api_key_2 = create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name="Key 2") @@ -78,3 +71,91 @@ def test_get_api_keys_ranked(admin_request, notify_db, notify_db_session): assert api_keys_ranked[1]["email_notifications"] == total_sends assert api_keys_ranked[1]["total_notifications"] == total_sends assert "last_notification_created" in api_keys_ranked[0] + + +class TestApiKeyRevocation: + def test_revoke_api_keys_with_valid_auth_revokes_and_notifies_user(self, client, notify_db, notify_db_session, mocker): + notify_users = mocker.patch("app.api_key.rest.send_api_key_revocation_email") + + service = create_service(service_name="Service 1") + api_key_1 = create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name="Key 1") + unsigned_secret = get_unsigned_secret(api_key_1.id) + + # Create token expected from the frontend + unsigned_secret = f"gcntfy-keyname-{service.id}-{unsigned_secret}" + + sre_auth_header = create_sre_authorization_header() + response = client.post( + url_for("sre_tools.revoke_api_keys"), + headers=[sre_auth_header], + json={"token": unsigned_secret, "type": "cds-tester", "url": "https://example.com", "source": "cds-tester"}, + ) + + # Get api key from DB + api_key_1 = get_api_key_by_secret(unsigned_secret) + assert response.status_code == 201 + assert api_key_1.expiry_date is not None + assert api_key_1.compromised_key_info["type"] == "cds-tester" + assert api_key_1.compromised_key_info["url"] == "https://example.com" + assert api_key_1.compromised_key_info["source"] == "cds-tester" + assert api_key_1.compromised_key_info["time_of_revocation"] + + notify_users.assert_called_once() + + def test_revoke_api_keys_fails_with_no_auth(self, client, notify_db, notify_db_session, mocker): + service = create_service(service_name="Service 1") + api_key_1 = create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name="Key 1") + unsigned_secret = get_unsigned_secret(api_key_1.id) + + response = client.post( + url_for("sre_tools.revoke_api_keys"), + headers=[], + json={"token": unsigned_secret, "type": "cds-tester", "url": "https://example.com", "source": "cds-tester"}, + ) + + assert response.status_code == 401 + + @pytest.mark.parametrize( + "payload", + ( + { + # no token + "type": "cds-tester", + "url": "https://example.com", + "source": "cds-tester", + }, + { + "token": "token", + # no type + "url": "https://example.com", + "source": "cds-tester", + }, + { + "token": "token", + "type": "cds-tester", + # no url + "source": "cds-tester", + }, + { + "token": "token", + "type": "cds-tester", + "url": "https://example.com", + # no source + }, + { + # 
no anything + }, + {"token": "token", "type": "cds-tester", "url": "https://example.com", "source": "cds-tester"}, # invalid token + ), + ) + def test_revoke_api_keys_fails_with_400_missing_or_invalid_payload( + self, client, notify_db, notify_db_session, mocker, payload + ): + sre_auth_header = create_sre_authorization_header() + response = client.post( + url_for("sre_tools.revoke_api_keys"), + headers=[sre_auth_header], + json=payload, + ) + + assert response.status_code == 400 diff --git a/tests/app/authentication/test_authentication.py b/tests/app/authentication/test_authentication.py index 492b5ef473..40615fc6a6 100644 --- a/tests/app/authentication/test_authentication.py +++ b/tests/app/authentication/test_authentication.py @@ -4,11 +4,10 @@ import jwt import pytest -from flask import current_app, json, request +from flask import current_app, g, json, request from freezegun import freeze_time from notifications_python_client.authentication import create_jwt_token -from app import api_user from app.authentication.auth import ( AUTH_TYPES, AuthError, @@ -136,18 +135,28 @@ def test_admin_auth_should_not_allow_api_key_scheme(client, sample_api_key): @pytest.mark.parametrize("scheme", ["ApiKey-v1", "apikey-v1", "APIKEY-V1"]) def test_should_allow_auth_with_api_key_scheme(client, sample_api_key, scheme): api_key_secret = get_unsigned_secret(sample_api_key.id) + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{api_key_secret}" + response = client.get("/notifications", headers={"Authorization": f"{scheme} {unsigned_secret}"}) - response = client.get("/notifications", headers={"Authorization": f"{scheme} {api_key_secret}"}) + assert response.status_code == 200 + + +def test_should_allow_auth_with_api_key_scheme_and_extra_spaces(client, sample_api_key): + api_key_secret = get_unsigned_secret(sample_api_key.id) + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{api_key_secret}" + response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1 {unsigned_secret}"}) assert response.status_code == 200 -def test_should_allow_auth_with_api_key_scheme_36_chars_or_longer(client, sample_api_key): +def test_should_NOT_allow_auth_with_api_key_scheme_with_incorrect_format(client, sample_api_key): api_key_secret = "fhsdkjhfdsfhsd" + get_unsigned_secret(sample_api_key.id) response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1 {api_key_secret}"}) - assert response.status_code == 200 + assert response.status_code == 403 + error_message = json.loads(response.get_data()) + assert error_message["message"] == {"token": ["Invalid token: Enter your full API key"]} def test_should_not_allow_invalid_api_key(client, sample_api_key): @@ -155,7 +164,7 @@ def test_should_not_allow_invalid_api_key(client, sample_api_key): assert response.status_code == 403 error_message = json.loads(response.get_data()) - assert error_message["message"] == {"token": ["Invalid token: API key not found"]} + assert error_message["message"] == {"token": ["Invalid token: Enter your full API key"]} def test_should_not_allow_expired_api_key(client, sample_api_key): @@ -163,7 +172,9 @@ def test_should_not_allow_expired_api_key(client, sample_api_key): expire_api_key(service_id=sample_api_key.service_id, api_key_id=sample_api_key.id) - response = client.get("/notifications", headers={"Authorization": f"ApiKey-v1 {api_key_secret}"}) + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{api_key_secret}" + + response = client.get("/notifications", headers={"Authorization": 
f"ApiKey-v1 {unsigned_secret}"}) assert response.status_code == 403 error_message = json.loads(response.get_data()) @@ -336,7 +347,7 @@ def test_should_attach_the_current_api_key_to_current_app(notify_api, sample_ser token = __create_token(sample_api_key.service_id) response = client.get("/notifications", headers={"Authorization": "Bearer {}".format(token)}) assert response.status_code == 200 - assert api_user == sample_api_key + assert g.api_user == sample_api_key def test_should_return_403_when_token_is_expired( @@ -381,7 +392,6 @@ def test_proxy_key_non_auth_endpoint(notify_api, check_proxy_header, header_valu "CHECK_PROXY_HEADER": check_proxy_header, }, ): - with notify_api.test_client() as client: response = client.get( path="/_status", @@ -415,7 +425,6 @@ def test_proxy_key_on_admin_auth_endpoint(notify_api, check_proxy_header, header "CHECK_PROXY_HEADER": check_proxy_header, }, ): - with notify_api.test_client() as client: response = client.get( path="/service", diff --git a/tests/app/aws/test_metric_logger.py b/tests/app/aws/test_metric_logger.py new file mode 100644 index 0000000000..de92111f63 --- /dev/null +++ b/tests/app/aws/test_metric_logger.py @@ -0,0 +1,42 @@ +from os import environ +from unittest.mock import patch +from uuid import uuid4 + +from aws_embedded_metrics.config import get_config # type: ignore +from aws_embedded_metrics.environment.ec2_environment import ( # type: ignore + EC2Environment, +) +from aws_embedded_metrics.environment.lambda_environment import ( # type: ignore + LambdaEnvironment, +) + +from app.aws.metrics_logger import MetricsLogger + + +class TestMetricsLogger: + def test_environment_defaults_to_ec2(self): + metrics_config = get_config() + metrics_config.environment = "" + metrics_logger = MetricsLogger() + assert type(metrics_logger.environment) is EC2Environment + + @patch.dict(environ, {"AWS_EXECUTION_ENV": "foo"}, clear=True) + def test_environment_set_lambda_when_lambda_envs_exist(self): + metrics_logger = MetricsLogger() + assert type(metrics_logger.environment) is LambdaEnvironment + + def test_environment_changes_when_set(self): + metrics_config = get_config() + metrics_config.environment = "lambda" + metrics_logger = MetricsLogger() + assert type(metrics_logger.environment) is LambdaEnvironment + + def test_flush_writes_to_stdout(self, capsys): + metrics_config = get_config() + metrics_config.environment = "local" + metrics_logger = MetricsLogger() + metric_name = f"foo_bar_baz_{str(uuid4())}" + metrics_logger.put_metric(metric_name, 1, "Count") + metrics_logger.flush() + captured = capsys.readouterr() + assert metric_name in str(captured.out) diff --git a/tests/app/aws/test_metrics.py b/tests/app/aws/test_metrics.py new file mode 100644 index 0000000000..ea307a9cb9 --- /dev/null +++ b/tests/app/aws/test_metrics.py @@ -0,0 +1,118 @@ +from unittest.mock import call + +import pytest +from botocore.exceptions import ClientError +from flask import Flask + +from app import create_app +from app.aws.metrics import ( + put_batch_saving_bulk_created, + put_batch_saving_bulk_processed, + put_batch_saving_expiry_metric, + put_batch_saving_inflight_metric, + put_batch_saving_inflight_processed, + put_batch_saving_metric, +) +from app.config import Config, Test + + +class TestBatchSavingMetricsFunctions: + @pytest.fixture(autouse=True) + def app(self): + config: Config = Test() # type: ignore + config.REDIS_ENABLED = True + app = Flask(config.NOTIFY_ENVIRONMENT) + create_app(app, config) + ctx = app.app_context() + ctx.push() + with 
app.test_request_context(): + yield app + ctx.pop() + return app + + @pytest.fixture(autouse=True) + def metrics_logger_mock(self, mocker): + metrics_logger_mock = mocker.patch("app.aws.metrics_logger") + metrics_logger_mock.metrics_config.disable_metric_extraction = False + return metrics_logger_mock + + def test_put_batch_metric(self, mocker, metrics_logger_mock): + redis_queue = mocker.MagicMock() + redis_queue._inbox = "foo" + put_batch_saving_metric(metrics_logger_mock, redis_queue, 1) + metrics_logger_mock.set_dimensions.assert_called_with({"list_name": "foo"}) + metrics_logger_mock.put_metric.assert_called_with("batch_saving_published", 1, "Count") + assert metrics_logger_mock.set_dimensions.called, "set_dimensions was not called and should have been" + + def test_put_batch_metric_disabled(self, mocker, metrics_logger_mock): + redis_queue = mocker.MagicMock() + redis_queue._inbox = "foo" + metrics_logger_mock.metrics_config.disable_metric_extraction = True + put_batch_saving_metric(metrics_logger_mock, redis_queue, 1) + assert not metrics_logger_mock.set_dimensions.called, "set_dimensions was called and should not have been" + assert not metrics_logger_mock.put_metric.called, "put_metric was called and should not have been" + + def test_put_batch_metric_multiple_items(self, mocker, metrics_logger_mock): + redis_queue = mocker.MagicMock() + redis_queue._inbox = "foo" + + put_batch_saving_metric(metrics_logger_mock, redis_queue, 20) + metrics_logger_mock.set_dimensions.assert_called_with({"list_name": "foo"}) + metrics_logger_mock.put_metric.assert_called_with("batch_saving_published", 20, "Count") + + def test_put_batch_saving_in_flight_metric(self, mocker, metrics_logger_mock): + redis_queue = mocker.MagicMock() + redis_queue._suffix = "foo" + redis_queue._process_type = "bar" + put_batch_saving_inflight_metric(metrics_logger_mock, redis_queue, 1) + metrics_logger_mock.set_dimensions.assert_called_with({"created": "True", "notification_type": "foo", "priority": "bar"}) + metrics_logger_mock.put_metric.assert_called_with("batch_saving_inflight", 1, "Count") + + def test_put_batch_saving_inflight_processed(self, mocker, metrics_logger_mock): + redis_queue = mocker.MagicMock() + redis_queue._suffix = "foo" + redis_queue._process_type = "bar" + put_batch_saving_inflight_processed(metrics_logger_mock, redis_queue, 1) + metrics_logger_mock.set_dimensions.assert_called_with( + {"acknowledged": "True", "notification_type": "foo", "priority": "bar"} + ) + metrics_logger_mock.put_metric.assert_called_with("batch_saving_inflight", 1, "Count") + + def test_put_batch_saving_expiry_metric(self, mocker, metrics_logger_mock): + redis_queue = mocker.MagicMock() + redis_queue._suffix = "foo" + redis_queue._process_type = "bar" + put_batch_saving_expiry_metric(metrics_logger_mock, redis_queue, 1) + metrics_logger_mock.put_metric.assert_called_with("batch_saving_inflight", 1, "Count") + metrics_logger_mock.set_dimensions.assert_has_calls( + [ + call({"expired": "True", "notification_type": "foo", "priority": "bar"}), + call({"expired": "True", "notification_type": "any", "priority": "any"}), + ] + ) + + def test_put_batch_saving_bulk_created(self, mocker, metrics_logger_mock): + put_batch_saving_bulk_created(metrics_logger_mock, 1, "foo", "bar") + metrics_logger_mock.put_metric.assert_called_with("batch_saving_bulk", 1, "Count") + metrics_logger_mock.set_dimensions.assert_called_with({"created": "True", "notification_type": "foo", "priority": "bar"}) + + def test_put_batch_saving_bulk_processed(self, 
mocker, metrics_logger_mock): + put_batch_saving_bulk_processed(metrics_logger_mock, 1, notification_type="foo", priority="bar") + metrics_logger_mock.put_metric.assert_called_with("batch_saving_bulk", 1, "Count") + metrics_logger_mock.set_dimensions.assert_called_with( + {"acknowledged": "True", "notification_type": "foo", "priority": "bar"} + ) + + def test_put_batch_metric_unknown_error(self, mocker, metrics_logger_mock): + redis_queue = mocker.MagicMock() + mock_logger = mocker.patch("app.aws.metrics.current_app.logger.warning") + redis_queue._inbox = "foo" + + metrics_logger_mock.flush.side_effect = ClientError( + {"Error": {"Code": "ResourceNotFoundException", "Message": "Not Found"}}, "bar" + ) + + put_batch_saving_metric(metrics_logger_mock, redis_queue, 1) + mock_logger.assert_called_with( + "Error sending CloudWatch Metric: An error occurred (ResourceNotFoundException) when calling the bar operation: Not Found" + ) diff --git a/tests/app/aws/test_s3.py b/tests/app/aws/test_s3.py index bae56c3f45..02de33cbba 100644 --- a/tests/app/aws/test_s3.py +++ b/tests/app/aws/test_s3.py @@ -1,6 +1,6 @@ import uuid from datetime import datetime, timedelta -from unittest.mock import call +from unittest.mock import Mock, call import pytest import pytz @@ -12,6 +12,7 @@ get_list_of_files_by_suffix, get_s3_bucket_objects, get_s3_file, + remove_jobs_from_s3, remove_transformed_dvla_file, upload_job_to_s3, ) @@ -214,3 +215,30 @@ def test_upload_job_to_s3(notify_api, mocker): bucket_name=current_app.config["CSV_UPLOAD_BUCKET_NAME"], file_location=f"service-{service_id}-notify/{upload_id}.csv", ) + + +def test_remove_jobs_from_s3(notify_api, mocker): + mock = Mock() + mocker.patch("app.aws.s3.resource", return_value=mock) + jobs = [ + type("Job", (object,), {"service_id": "foo", "id": "j1"}), + type("Job", (object,), {"service_id": "foo", "id": "j2"}), + type("Job", (object,), {"service_id": "foo", "id": "j3"}), + type("Job", (object,), {"service_id": "foo", "id": "j4"}), + type("Job", (object,), {"service_id": "foo", "id": "j5"}), + ] + + remove_jobs_from_s3(jobs, batch_size=2) + + mock.assert_has_calls( + [ + call.Bucket(current_app.config["CSV_UPLOAD_BUCKET_NAME"]), + call.Bucket().delete_objects( + Delete={"Objects": [{"Key": "service-foo-notify/j1.csv"}, {"Key": "service-foo-notify/j2.csv"}]} + ), + call.Bucket().delete_objects( + Delete={"Objects": [{"Key": "service-foo-notify/j3.csv"}, {"Key": "service-foo-notify/j4.csv"}]} + ), + call.Bucket().delete_objects(Delete={"Objects": [{"Key": "service-foo-notify/j5.csv"}]}), + ] + ) diff --git a/tests/app/billing/test_billing.py b/tests/app/billing/test_billing.py index b9330e14ff..652bf68b0d 100644 --- a/tests/app/billing/test_billing.py +++ b/tests/app/billing/test_billing.py @@ -35,7 +35,6 @@ def _assert_dict_equals(actual, expected_dict): def test_create_update_free_sms_fragment_limit_invalid_schema(client, sample_service): - response = client.post( "service/{}/billing/free-sms-fragment-limit".format(sample_service.id), data={}, @@ -100,7 +99,6 @@ def test_create_free_sms_fragment_limit_updates_existing_year(admin_request, sam def test_get_free_sms_fragment_limit_current_year_creates_new_row(client, sample_service): - current_year = get_current_financial_year_start_year() create_annual_billing(sample_service.id, 9999, current_year - 1) diff --git a/tests/app/celery/test_ftp_update_tasks.py b/tests/app/celery/test_ftp_update_tasks.py index 674474cb87..114c5a8259 100644 --- a/tests/app/celery/test_ftp_update_tasks.py +++ 
b/tests/app/celery/test_ftp_update_tasks.py @@ -1,38 +1,10 @@ -from collections import defaultdict, namedtuple -from datetime import date, datetime +from collections import namedtuple +from datetime import date import pytest -from flask import current_app -from freezegun import freeze_time -from app.celery.tasks import ( - check_billable_units, - get_billing_date_in_est_from_filename, - persist_daily_sorted_letter_counts, - process_updates_from_file, - record_daily_sorted_counts, - update_letter_notifications_statuses, - update_letter_notifications_to_error, - update_letter_notifications_to_sent_to_dvla, -) -from app.dao.daily_sorted_letter_dao import dao_get_daily_sorted_letter_by_billing_day -from app.exceptions import DVLAException, NotificationTechnicalFailureException -from app.models import ( - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_SENDING, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_TEMPORARY_FAILURE, - DailySortedLetter, - NotificationHistory, -) -from tests.app.db import ( - create_notification, - create_notification_history, - create_service_callback_api, - save_notification, -) -from tests.conftest import set_config +from app.celery.tasks import check_billable_units, get_billing_date_in_est_from_filename +from tests.app.db import create_notification, save_notification @pytest.fixture @@ -44,182 +16,6 @@ def notification_update(): return NotificationUpdate("REFERENCE_ABC", "sent", "1", "cost") -def test_update_letter_notifications_statuses_raises_for_invalid_format(notify_api, mocker): - invalid_file = "ref-foo|Sent|1|Unsorted\nref-bar|Sent|2" - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=invalid_file) - - with pytest.raises(DVLAException) as e: - update_letter_notifications_statuses(filename="NOTIFY-20170823160812-RSP.TXT") - assert "DVLA response file: {} has an invalid format".format("NOTIFY-20170823160812-RSP.TXT") in str(e) - - -def test_update_letter_notification_statuses_when_notification_does_not_exist_updates_notification_history( - sample_letter_template, mocker -): - valid_file = "ref-foo|Sent|1|Unsorted" - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=valid_file) - notification = create_notification_history( - sample_letter_template, - reference="ref-foo", - status=NOTIFICATION_SENDING, - billable_units=1, - ) - - update_letter_notifications_statuses(filename="NOTIFY-20170823160812-RSP.TXT") - - updated_history = NotificationHistory.query.filter_by(id=notification.id).one() - assert updated_history.status == NOTIFICATION_DELIVERED - - -def test_update_letter_notifications_statuses_raises_dvla_exception(notify_api, mocker, sample_letter_template): - valid_file = "ref-foo|Failed|1|Unsorted" - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=valid_file) - save_notification( - create_notification( - sample_letter_template, - reference="ref-foo", - status=NOTIFICATION_SENDING, - billable_units=0, - ) - ) - - with pytest.raises(DVLAException) as e: - update_letter_notifications_statuses(filename="failed.txt") - failed = ["ref-foo"] - assert "DVLA response file: {filename} has failed letters with notification.reference {failures}".format( - filename="failed.txt", failures=failed - ) in str(e) - - -def test_update_letter_notifications_statuses_calls_with_correct_bucket_location(notify_api, mocker): - s3_mock = mocker.patch("app.celery.tasks.s3.get_s3_object") - - with set_config(notify_api, "NOTIFY_EMAIL_DOMAIN", "foo.bar"): - 
update_letter_notifications_statuses(filename="NOTIFY-20170823160812-RSP.TXT") - s3_mock.assert_called_with( - "{}-ftp".format(current_app.config["NOTIFY_EMAIL_DOMAIN"]), - "NOTIFY-20170823160812-RSP.TXT", - ) - - -def test_update_letter_notifications_statuses_builds_updates_from_content(notify_api, mocker): - valid_file = "ref-foo|Sent|1|Unsorted\nref-bar|Sent|2|Sorted" - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=valid_file) - update_mock = mocker.patch("app.celery.tasks.process_updates_from_file") - - update_letter_notifications_statuses(filename="NOTIFY-20170823160812-RSP.TXT") - - update_mock.assert_called_with("ref-foo|Sent|1|Unsorted\nref-bar|Sent|2|Sorted") - - -def test_update_letter_notifications_statuses_builds_updates_list(notify_api, mocker): - valid_file = "ref-foo|Sent|1|Unsorted\nref-bar|Sent|2|Sorted" - updates = process_updates_from_file(valid_file) - - assert len(updates) == 2 - - assert updates[0].reference == "ref-foo" - assert updates[0].status == "Sent" - assert updates[0].page_count == "1" - assert updates[0].cost_threshold == "Unsorted" - - assert updates[1].reference == "ref-bar" - assert updates[1].status == "Sent" - assert updates[1].page_count == "2" - assert updates[1].cost_threshold == "Sorted" - - -def test_update_letter_notifications_statuses_persisted(notify_api, mocker, sample_letter_template): - sent_letter = save_notification( - create_notification( - sample_letter_template, - reference="ref-foo", - status=NOTIFICATION_SENDING, - billable_units=1, - ) - ) - failed_letter = save_notification( - create_notification( - sample_letter_template, - reference="ref-bar", - status=NOTIFICATION_SENDING, - billable_units=2, - ) - ) - create_service_callback_api(service=sample_letter_template.service, url="https://original_url.com") - valid_file = "{}|Sent|1|Unsorted\n{}|Failed|2|Sorted".format(sent_letter.reference, failed_letter.reference) - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=valid_file) - - with pytest.raises(expected_exception=DVLAException) as e: - update_letter_notifications_statuses(filename="NOTIFY-20170823160812-RSP.TXT") - - assert sent_letter.status == NOTIFICATION_DELIVERED - assert sent_letter.billable_units == 1 - assert sent_letter.updated_at - assert failed_letter.status == NOTIFICATION_TEMPORARY_FAILURE - assert failed_letter.billable_units == 2 - assert failed_letter.updated_at - assert ( - "DVLA response file: {filename} has failed letters with notification.reference {failures}".format( - filename="NOTIFY-20170823160812-RSP.TXT", - failures=[format(failed_letter.reference)], - ) - in str(e) - ) - - -def test_update_letter_notifications_does_not_call_send_callback_if_no_db_entry(notify_api, mocker, sample_letter_template): - sent_letter = save_notification( - create_notification( - sample_letter_template, - reference="ref-foo", - status=NOTIFICATION_SENDING, - billable_units=0, - ) - ) - valid_file = "{}|Sent|1|Unsorted\n".format(sent_letter.reference) - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=valid_file) - - send_mock = mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async") - - update_letter_notifications_statuses(filename="NOTIFY-20170823160812-RSP.TXT") - send_mock.assert_not_called() - - -def test_update_letter_notifications_to_sent_to_dvla_updates_based_on_notification_references(client, sample_letter_template): - first = save_notification(create_notification(sample_letter_template, reference="first ref")) - second = 
save_notification(create_notification(sample_letter_template, reference="second ref")) - - dt = datetime.utcnow() - with freeze_time(dt): - update_letter_notifications_to_sent_to_dvla([first.reference]) - - assert first.status == NOTIFICATION_SENDING - assert first.sent_by == "dvla" - assert first.sent_at == dt - assert first.updated_at == dt - assert second.status == NOTIFICATION_CREATED - - -def test_update_letter_notifications_to_error_updates_based_on_notification_references( - sample_letter_template, -): - first = save_notification(create_notification(sample_letter_template, reference="first ref")) - second = save_notification(create_notification(sample_letter_template, reference="second ref")) - create_service_callback_api(service=sample_letter_template.service, url="https://original_url.com") - dt = datetime.utcnow() - with freeze_time(dt): - with pytest.raises(NotificationTechnicalFailureException) as e: - update_letter_notifications_to_error([first.reference]) - assert first.reference in str(e.value) - - assert first.status == NOTIFICATION_TECHNICAL_FAILURE - assert first.sent_by is None - assert first.sent_at is None - assert first.updated_at == dt - assert second.status == NOTIFICATION_CREATED - - def test_check_billable_units_when_billable_units_matches_page_count(client, sample_letter_template, mocker, notification_update): mock_logger = mocker.patch("app.celery.tasks.current_app.logger.error") @@ -253,82 +49,3 @@ def test_get_billing_date_in_est_from_filename(filename_date, billing_date): result = get_billing_date_in_est_from_filename(filename) assert result == billing_date - - -@freeze_time("2018-01-11 09:00:00") -def test_persist_daily_sorted_letter_counts_saves_sorted_and_unsorted_values(client, notify_db_session): - letter_counts = defaultdict(int, **{"unsorted": 5, "sorted": 1}) - persist_daily_sorted_letter_counts(date.today(), "test.txt", letter_counts) - day = dao_get_daily_sorted_letter_by_billing_day(date.today()) - - assert day.unsorted_count == 5 - assert day.sorted_count == 1 - - -def test_record_daily_sorted_counts_persists_daily_sorted_letter_count( - notify_api, - notify_db_session, - mocker, -): - valid_file = "Letter1|Sent|1|uNsOrTeD\nLetter2|Sent|2|SORTED\nLetter3|Sent|2|Sorted" - - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=valid_file) - - assert DailySortedLetter.query.count() == 0 - - record_daily_sorted_counts(filename="NOTIFY-20170823160812-RSP.TXT") - - daily_sorted_counts = DailySortedLetter.query.all() - assert len(daily_sorted_counts) == 1 - assert daily_sorted_counts[0].sorted_count == 2 - assert daily_sorted_counts[0].unsorted_count == 1 - - -def test_record_daily_sorted_counts_raises_dvla_exception_with_unknown_sorted_status( - notify_api, - mocker, -): - file_contents = "ref-foo|Failed|1|invalid\nrow_2|Failed|1|MM" - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=file_contents) - filename = "failed.txt" - with pytest.raises(DVLAException) as e: - record_daily_sorted_counts(filename=filename) - - assert "DVLA response file: {} contains unknown Sorted status".format(filename) in e.value.message - assert "'mm'" in e.value.message - assert "'invalid'" in e.value.message - - -def test_record_daily_sorted_counts_persists_daily_sorted_letter_count_with_no_sorted_values( - notify_api, mocker, notify_db_session -): - valid_file = "Letter1|Sent|1|Unsorted\nLetter2|Sent|2|Unsorted" - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=valid_file) - - 
record_daily_sorted_counts(filename="NOTIFY-20170823160812-RSP.TXT") - - daily_sorted_letter = dao_get_daily_sorted_letter_by_billing_day(date(2017, 8, 23)) - - assert daily_sorted_letter.unsorted_count == 2 - assert daily_sorted_letter.sorted_count == 0 - - -def test_record_daily_sorted_counts_can_run_twice_for_same_file(notify_api, mocker, notify_db_session): - valid_file = "Letter1|Sent|1|sorted\nLetter2|Sent|2|Unsorted" - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=valid_file) - - record_daily_sorted_counts(filename="NOTIFY-20170823160812-RSP.TXT") - - daily_sorted_letter = dao_get_daily_sorted_letter_by_billing_day(date(2017, 8, 23)) - - assert daily_sorted_letter.unsorted_count == 1 - assert daily_sorted_letter.sorted_count == 1 - - updated_file = "Letter1|Sent|1|sorted\nLetter2|Sent|2|Unsorted\nLetter3|Sent|2|Unsorted" - mocker.patch("app.celery.tasks.s3.get_s3_file", return_value=updated_file) - - record_daily_sorted_counts(filename="NOTIFY-20170823160812-RSP.TXT") - daily_sorted_letter = dao_get_daily_sorted_letter_by_billing_day(date(2017, 8, 23)) - - assert daily_sorted_letter.unsorted_count == 2 - assert daily_sorted_letter.sorted_count == 1 diff --git a/tests/app/celery/test_letters_pdf_tasks.py b/tests/app/celery/test_letters_pdf_tasks.py deleted file mode 100644 index c179f74a75..0000000000 --- a/tests/app/celery/test_letters_pdf_tasks.py +++ /dev/null @@ -1,885 +0,0 @@ -import base64 -from unittest.mock import ANY, Mock, call - -import boto3 -import pytest -import requests_mock -from botocore.exceptions import ClientError -from flask import current_app -from freezegun import freeze_time -from moto import mock_s3 -from PyPDF2.utils import PdfReadError -from requests import RequestException -from sqlalchemy.orm.exc import NoResultFound - -from app.celery.letters_pdf_tasks import ( - _move_invalid_letter_and_update_status, - _sanitise_precompiled_pdf, - collate_letter_pdfs_for_day, - create_letters_pdf, - get_letters_pdf, - group_letters, - letter_in_created_state, - process_virus_scan_error, - process_virus_scan_failed, - process_virus_scan_passed, - replay_letters_in_error, -) -from app.errors import VirusScanError -from app.letters.utils import ScanErrorType -from app.models import ( - KEY_TYPE_NORMAL, - KEY_TYPE_TEST, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_PENDING_VIRUS_CHECK, - NOTIFICATION_SENDING, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_VALIDATION_FAILED, - NOTIFICATION_VIRUS_SCAN_FAILED, - Notification, -) -from celery.exceptions import MaxRetriesExceededError, Retry -from tests.app.db import create_letter_branding, create_notification, save_notification -from tests.conftest import set_config_values - - -def test_should_have_decorated_tasks_functions(): - assert create_letters_pdf.__wrapped__.__name__ == "create_letters_pdf" - assert collate_letter_pdfs_for_day.__wrapped__.__name__ == "collate_letter_pdfs_for_day" - assert process_virus_scan_passed.__wrapped__.__name__ == "process_virus_scan_passed" - assert process_virus_scan_failed.__wrapped__.__name__ == "process_virus_scan_failed" - assert process_virus_scan_error.__wrapped__.__name__ == "process_virus_scan_error" - - -@pytest.mark.parametrize("personalisation", [{"name": "test"}, None]) -def test_get_letters_pdf_calls_notifications_template_preview_service_correctly( - notify_api, mocker, client, sample_letter_template, personalisation -): - contact_block = "Mr Foo,\n1 Test Street,\nLondon\nN1" - filename = "opg" - - with set_config_values( - notify_api, - { - 
"TEMPLATE_PREVIEW_API_HOST": "http://localhost/notifications-template-preview", - "TEMPLATE_PREVIEW_API_KEY": "test-key", - }, - ): - with requests_mock.Mocker() as request_mock: - mock_post = request_mock.post( - "http://localhost/notifications-template-preview/print.pdf", - content=b"\x00\x01", - status_code=200, - ) - - get_letters_pdf( - sample_letter_template, - contact_block=contact_block, - filename=filename, - values=personalisation, - ) - - assert mock_post.last_request.json() == { - "values": personalisation, - "letter_contact_block": contact_block, - "filename": filename, - "template": { - "subject": sample_letter_template.subject, - "content": sample_letter_template.content, - }, - } - - -@pytest.mark.parametrize("page_count,expected_billable_units", [("1", 1), ("2", 1), ("3", 2)]) -def test_get_letters_pdf_calculates_billing_units( - notify_api, - mocker, - client, - sample_letter_template, - page_count, - expected_billable_units, -): - contact_block = "Mr Foo,\n1 Test Street,\nLondon\nN1" - filename = "opg" - - with set_config_values( - notify_api, - { - "TEMPLATE_PREVIEW_API_HOST": "http://localhost/notifications-template-preview", - "TEMPLATE_PREVIEW_API_KEY": "test-key", - }, - ): - with requests_mock.Mocker() as request_mock: - request_mock.post( - "http://localhost/notifications-template-preview/print.pdf", - content=b"\x00\x01", - headers={"X-pdf-page-count": page_count}, - status_code=200, - ) - - _, billable_units = get_letters_pdf( - sample_letter_template, - contact_block=contact_block, - filename=filename, - values=None, - ) - - assert billable_units == expected_billable_units - - -@freeze_time("2017-12-04 17:31:00") -def test_create_letters_pdf_calls_s3upload(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", "1")) - mock_s3 = mocker.patch("app.letters.utils.s3upload") - - create_letters_pdf(sample_letter_notification.id) - - mock_s3.assert_called_with( - bucket_name=current_app.config["LETTERS_PDF_BUCKET_NAME"], - file_location="2017-12-04/NOTIFY.FOO.D.2.C.C.20171204173100.PDF", - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@freeze_time("2017-12-04 17:31:00") -def test_create_letters_pdf_calls_s3upload_for_test_letters(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", "1")) - mock_s3 = mocker.patch("app.letters.utils.s3upload") - sample_letter_notification.key_type = "test" - - create_letters_pdf(sample_letter_notification.id) - - mock_s3.assert_called_with( - bucket_name=current_app.config["TEST_LETTERS_BUCKET_NAME"], - file_location="NOTIFY.FOO.D.2.C.C.20171204173100.PDF", - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -def test_create_letters_pdf_sets_billable_units(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - mocker.patch("app.letters.utils.s3upload") - - create_letters_pdf(sample_letter_notification.id) - noti = Notification.query.filter(Notification.reference == sample_letter_notification.reference).one() - assert noti.billable_units == 1 - - -def test_create_letters_pdf_non_existent_notification(notify_api, mocker, fake_uuid): - with pytest.raises(expected_exception=NoResultFound): - create_letters_pdf(fake_uuid) - - -def test_create_letters_pdf_handles_request_errors(mocker, sample_letter_notification): - mock_get_letters_pdf = 
mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", side_effect=RequestException) - mock_retry = mocker.patch("app.celery.letters_pdf_tasks.create_letters_pdf.retry") - - create_letters_pdf(sample_letter_notification.id) - - assert mock_get_letters_pdf.called - assert mock_retry.called - - -def test_create_letters_pdf_handles_s3_errors(mocker, sample_letter_notification): - mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - error_response = { - "Error": { - "Code": "InvalidParameterValue", - "Message": "some error message from amazon", - "Type": "Sender", - } - } - mock_s3 = mocker.patch( - "app.letters.utils.s3upload", - side_effect=ClientError(error_response, "operation_name"), - ) - mock_retry = mocker.patch("app.celery.letters_pdf_tasks.create_letters_pdf.retry") - - create_letters_pdf(sample_letter_notification.id) - - assert mock_s3.called - assert mock_retry.called - - -def test_create_letters_pdf_sets_technical_failure_max_retries(mocker, sample_letter_notification): - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", side_effect=RequestException) - mock_retry = mocker.patch( - "app.celery.letters_pdf_tasks.create_letters_pdf.retry", - side_effect=MaxRetriesExceededError, - ) - mock_update_noti = mocker.patch("app.celery.letters_pdf_tasks.update_notification_status_by_id") - - create_letters_pdf(sample_letter_notification.id) - - assert mock_get_letters_pdf.called - assert mock_retry.called - mock_update_noti.assert_called_once_with(sample_letter_notification.id, "technical-failure") - - -def test_create_letters_gets_the_right_logo_when_service_has_no_logo(notify_api, mocker, sample_letter_notification): - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - mocker.patch("app.letters.utils.s3upload") - mocker.patch("app.celery.letters_pdf_tasks.update_notification_status_by_id") - - create_letters_pdf(sample_letter_notification.id) - mock_get_letters_pdf.assert_called_once_with( - sample_letter_notification.template, - contact_block=sample_letter_notification.reply_to_text, - filename=None, - values=sample_letter_notification.personalisation, - ) - - -# We only need this while we are migrating to the new letter_branding model -def test_create_letters_gets_the_right_logo_when_service_has_letter_branding_logo(notify_api, mocker, sample_letter_notification): - letter_branding = create_letter_branding(name="test brand", filename="test-brand") - sample_letter_notification.service.letter_branding = letter_branding - mock_get_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.get_letters_pdf", return_value=(b"\x00\x01", 1)) - mocker.patch("app.letters.utils.s3upload") - mocker.patch("app.celery.letters_pdf_tasks.update_notification_status_by_id") - - create_letters_pdf(sample_letter_notification.id) - mock_get_letters_pdf.assert_called_once_with( - sample_letter_notification.template, - contact_block=sample_letter_notification.reply_to_text, - filename=sample_letter_notification.service.letter_branding.filename, - values=sample_letter_notification.personalisation, - ) - - -def test_collate_letter_pdfs_for_day(notify_api, mocker): - mock_s3 = mocker.patch( - "app.celery.tasks.s3.get_s3_bucket_objects", - return_value=[ - {"Key": "B.pDf", "Size": 2}, - {"Key": "A.PDF", "Size": 1}, - {"Key": "C.pdf", "Size": 3}, - ], - ) - mock_group_letters = mocker.patch( - "app.celery.letters_pdf_tasks.group_letters", - return_value=[ - [{"Key": 
"A.PDF", "Size": 1}, {"Key": "B.pDf", "Size": 2}], - [{"Key": "C.pdf", "Size": 3}], - ], - ) - mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task") - - collate_letter_pdfs_for_day("2017-01-02") - - mock_s3.assert_called_once_with("test-letters-pdf", subfolder="2017-01-02") - mock_group_letters.assert_called_once_with(sorted(mock_s3.return_value, key=lambda x: x["Key"])) - assert mock_celery.call_args_list[0] == call( - name="zip-and-send-letter-pdfs", - kwargs={ - "filenames_to_zip": ["A.PDF", "B.pDf"], - "upload_filename": "NOTIFY.2017-01-02.001.oqdjIM2-NAUU9Sm5Slmi.ZIP", - }, - queue="process-ftp-tasks", - compression="zlib", - ) - assert mock_celery.call_args_list[1] == call( - name="zip-and-send-letter-pdfs", - kwargs={ - "filenames_to_zip": ["C.pdf"], - "upload_filename": "NOTIFY.2017-01-02.002.tdr7hcdPieiqjkVoS4kU.ZIP", - }, - queue="process-ftp-tasks", - compression="zlib", - ) - - -@freeze_time("2018-09-12 17:50:00") -def test_collate_letter_pdfs_for_day_works_without_date_param(notify_api, mocker): - mock_s3 = mocker.patch("app.celery.tasks.s3.get_s3_bucket_objects") - collate_letter_pdfs_for_day() - expected_date = "2018-09-12" - mock_s3.assert_called_once_with("test-letters-pdf", subfolder=expected_date) - - -def test_group_letters_splits_on_file_size(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [ - # ends under max but next one is too big - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - # ends on exactly max - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - # exactly max goes in next file - {"Key": "F.pdf", "Size": 5}, - # if it's bigger than the max, still gets included - {"Key": "G.pdf", "Size": 6}, - # whatever's left goes in last list - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - - with set_config_values(notify_api, {"MAX_LETTER_PDF_ZIP_FILESIZE": 5}): - x = group_letters(letters) - - assert next(x) == [{"Key": "A.pdf", "Size": 1}, {"Key": "B.pdf", "Size": 2}] - assert next(x) == [ - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - ] - assert next(x) == [{"Key": "F.pdf", "Size": 5}] - assert next(x) == [{"Key": "G.pdf", "Size": 6}] - assert next(x) == [{"Key": "H.pdf", "Size": 1}, {"Key": "I.pdf", "Size": 1}] - # make sure iterator is exhausted - assert next(x, None) is None - - -def test_group_letters_splits_on_file_count(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [ - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - {"Key": "F.pdf", "Size": 5}, - {"Key": "G.pdf", "Size": 6}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - - with set_config_values(notify_api, {"MAX_LETTER_PDF_COUNT_PER_ZIP": 3}): - x = group_letters(letters) - - assert next(x) == [ - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - {"Key": "C.pdf", "Size": 3}, - ] - assert next(x) == [ - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - {"Key": "F.pdf", "Size": 5}, - ] - assert next(x) == [ - {"Key": "G.pdf", "Size": 6}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - # make sure iterator is exhausted - assert next(x, None) is None - - -def test_group_letters_splits_on_file_size_and_file_count(notify_api, mocker): - 
mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [ - # ends under max file size but next file is too big - {"Key": "A.pdf", "Size": 1}, - {"Key": "B.pdf", "Size": 2}, - # ends on exactly max number of files and file size - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - # exactly max file size goes in next file - {"Key": "F.pdf", "Size": 5}, - # file size is within max but number of files reaches limit - {"Key": "G.pdf", "Size": 1}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - # whatever's left goes in last list - {"Key": "J.pdf", "Size": 1}, - ] - - with set_config_values( - notify_api, - {"MAX_LETTER_PDF_ZIP_FILESIZE": 5, "MAX_LETTER_PDF_COUNT_PER_ZIP": 3}, - ): - x = group_letters(letters) - - assert next(x) == [{"Key": "A.pdf", "Size": 1}, {"Key": "B.pdf", "Size": 2}] - assert next(x) == [ - {"Key": "C.pdf", "Size": 3}, - {"Key": "D.pdf", "Size": 1}, - {"Key": "E.pdf", "Size": 1}, - ] - assert next(x) == [{"Key": "F.pdf", "Size": 5}] - assert next(x) == [ - {"Key": "G.pdf", "Size": 1}, - {"Key": "H.pdf", "Size": 1}, - {"Key": "I.pdf", "Size": 1}, - ] - assert next(x) == [{"Key": "J.pdf", "Size": 1}] - # make sure iterator is exhausted - assert next(x, None) is None - - -def test_group_letters_ignores_non_pdfs(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - letters = [{"Key": "A.zip"}] - assert list(group_letters(letters)) == [] - - -def test_group_letters_ignores_notifications_already_sent(notify_api, mocker): - mock = mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=False) - letters = [{"Key": "A.pdf"}] - assert list(group_letters(letters)) == [] - mock.assert_called_once_with("A.pdf") - - -def test_group_letters_with_no_letters(notify_api, mocker): - mocker.patch("app.celery.letters_pdf_tasks.letter_in_created_state", return_value=True) - assert list(group_letters([])) == [] - - -def test_letter_in_created_state(sample_notification): - sample_notification.reference = "ABCDEF1234567890" - filename = "2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF" - - assert letter_in_created_state(filename) is True - - -def test_letter_in_created_state_fails_if_notification_not_in_created( - sample_notification, -): - sample_notification.reference = "ABCDEF1234567890" - sample_notification.status = NOTIFICATION_SENDING - filename = "2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF" - assert letter_in_created_state(filename) is False - - -def test_letter_in_created_state_fails_if_notification_doesnt_exist( - sample_notification, -): - sample_notification.reference = "QWERTY1234567890" - filename = "2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF" - assert letter_in_created_state(filename) is False - - -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize( - "key_type,noti_status,bucket_config_name,destination_folder", - [ - ( - KEY_TYPE_NORMAL, - NOTIFICATION_CREATED, - "LETTERS_PDF_BUCKET_NAME", - "2018-01-01/", - ), - (KEY_TYPE_TEST, NOTIFICATION_DELIVERED, "TEST_LETTERS_BUCKET_NAME", ""), - ], -) -def test_process_letter_task_check_virus_scan_passed( - sample_letter_template, - mocker, - key_type, - noti_status, - bucket_config_name, - destination_folder, -): - letter_notification = save_notification( - create_notification( - template=sample_letter_template, - billable_units=0, - status="pending-virus-check", - key_type=key_type, - 
reference="{} letter".format(key_type), - ) - ) - filename = "NOTIFY.{}".format(letter_notification.reference) - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config[bucket_config_name] - - conn = boto3.resource("s3", region_name="ca-central-1") - conn.create_bucket(Bucket=source_bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"old_pdf") - - mock_get_page_count = mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=1) - mock_s3upload = mocker.patch("app.celery.letters_pdf_tasks.s3upload") - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - process_virus_scan_passed(filename) - - assert letter_notification.status == noti_status - assert letter_notification.billable_units == 1 - assert rmock.called - assert rmock.request_history[0].url == endpoint - - mock_s3upload.assert_called_once_with( - bucket_name=target_bucket_name, - filedata=b"new_pdf", - file_location=destination_folder + filename, - region="ca-central-1", - ) - mock_get_page_count.assert_called_once_with(b"old_pdf") - - -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize("key_type", [KEY_TYPE_NORMAL, KEY_TYPE_TEST]) -def test_process_letter_task_check_virus_scan_passed_when_sanitise_fails(sample_letter_notification, mocker, key_type): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config["INVALID_PDF_BUCKET_NAME"] - - conn = boto3.resource("s3", region_name="ca-central-1") - conn.create_bucket(Bucket=source_bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"pdf_content") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - sample_letter_notification.key_type = key_type - mock_move_s3 = mocker.patch("app.letters.utils._move_s3_object") - mock_sanitise = mocker.patch("app.celery.letters_pdf_tasks._sanitise_precompiled_pdf", return_value=None) - mock_get_page_count = mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=2) - - process_virus_scan_passed(filename) - - assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED - assert sample_letter_notification.billable_units == 0 - mock_sanitise.assert_called_once_with(ANY, sample_letter_notification, b"pdf_content") - mock_move_s3.assert_called_once_with(source_bucket_name, filename, target_bucket_name, filename) - - mock_get_page_count.assert_called_once_with(b"pdf_content") - - -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize( - "key_type,notification_status,bucket_config_name", - [ - (KEY_TYPE_NORMAL, NOTIFICATION_CREATED, "LETTERS_PDF_BUCKET_NAME"), - (KEY_TYPE_TEST, NOTIFICATION_DELIVERED, "TEST_LETTERS_BUCKET_NAME"), - ], -) -def test_process_letter_task_check_virus_scan_passed_when_redaction_fails( - sample_letter_notification, - mocker, - key_type, - notification_status, - bucket_config_name, -): - filename = 
"NOTIFY.{}".format(sample_letter_notification.reference) - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config[bucket_config_name] - - conn = boto3.resource("s3", region_name="eu-west-1") - conn.create_bucket(Bucket=bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - sample_letter_notification.key_type = key_type - mock_copy_s3 = mocker.patch("app.letters.utils._copy_s3_object") - mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=2) - - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "redaction_failed_message": "No matches for address block during redaction procedure", - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - process_virus_scan_passed(filename) - - assert sample_letter_notification.billable_units == 2 - assert sample_letter_notification.status == notification_status - if key_type == KEY_TYPE_NORMAL: - mock_copy_s3.assert_called_once_with(bucket_name, filename, bucket_name, "REDACTION_FAILURE/" + filename) - else: - mock_copy_s3.assert_not_called() - - -@freeze_time("2018-01-01 18:00") -@mock_s3 -@pytest.mark.parametrize("key_type", [KEY_TYPE_NORMAL, KEY_TYPE_TEST]) -def test_process_letter_task_check_virus_scan_passed_when_file_cannot_be_opened(sample_letter_notification, mocker, key_type): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - target_bucket_name = current_app.config["INVALID_PDF_BUCKET_NAME"] - - conn = boto3.resource("s3", region_name="ca-central-1") - conn.create_bucket(Bucket=source_bucket_name) - conn.create_bucket(Bucket=target_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"pdf_content") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - sample_letter_notification.key_type = key_type - mock_move_s3 = mocker.patch("app.letters.utils._move_s3_object") - - mock_get_page_count = mocker.patch("app.celery.letters_pdf_tasks.get_page_count", side_effect=PdfReadError) - mock_sanitise = mocker.patch("app.celery.letters_pdf_tasks._sanitise_precompiled_pdf") - - process_virus_scan_passed(filename) - - mock_sanitise.assert_not_called() - mock_get_page_count.assert_called_once_with(b"pdf_content") - mock_move_s3.assert_called_once_with(source_bucket_name, filename, target_bucket_name, filename) - assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED - assert sample_letter_notification.billable_units == 0 - - -@mock_s3 -def test_process_virus_scan_passed_logs_error_and_sets_tech_failure_if_s3_error_uploading_to_live_bucket( - mocker, - sample_letter_notification, -): - mock_logger = mocker.patch("app.celery.tasks.current_app.logger.exception") - - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - - source_bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - conn = boto3.resource("s3", region_name="ca-central-1") - 
conn.create_bucket(Bucket=source_bucket_name) - - s3 = boto3.client("s3", region_name="ca-central-1") - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b"pdf_content") - - mocker.patch("app.celery.letters_pdf_tasks.get_page_count", return_value=1) - - error_response = { - "Error": { - "Code": "InvalidParameterValue", - "Message": "some error message from amazon", - "Type": "Sender", - } - } - mocker.patch( - "app.celery.letters_pdf_tasks._upload_pdf_to_test_or_live_pdf_bucket", - side_effect=ClientError(error_response, "operation_name"), - ) - - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - process_virus_scan_passed(filename) - - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE - mock_logger.assert_called_once_with( - "Error uploading letter to live pdf bucket for notification: {}".format(sample_letter_notification.id) - ) - - -def test_move_invalid_letter_and_update_status_logs_error_and_sets_tech_failure_state_if_s3_error( - mocker, - sample_letter_notification, -): - error_response = { - "Error": { - "Code": "InvalidParameterValue", - "Message": "some error message from amazon", - "Type": "Sender", - } - } - mocker.patch( - "app.celery.letters_pdf_tasks.move_scan_to_invalid_pdf_bucket", - side_effect=ClientError(error_response, "operation_name"), - ) - mock_logger = mocker.patch("app.celery.tasks.current_app.logger.exception") - - _move_invalid_letter_and_update_status(sample_letter_notification, "filename", mocker.Mock()) - - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE - mock_logger.assert_called_once_with( - "Error when moving letter with id {} to invalid PDF bucket".format(sample_letter_notification.id) - ) - - -def test_process_letter_task_check_virus_scan_failed(sample_letter_notification, mocker): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - mock_move_failed_pdf = mocker.patch("app.celery.letters_pdf_tasks.move_failed_pdf") - - with pytest.raises(VirusScanError) as e: - process_virus_scan_failed(filename) - - assert "Virus scan failed:" in str(e) - mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.FAILURE) - assert sample_letter_notification.status == NOTIFICATION_VIRUS_SCAN_FAILED - - -def test_process_letter_task_check_virus_scan_error(sample_letter_notification, mocker): - filename = "NOTIFY.{}".format(sample_letter_notification.reference) - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - mock_move_failed_pdf = mocker.patch("app.celery.letters_pdf_tasks.move_failed_pdf") - - with pytest.raises(VirusScanError) as e: - process_virus_scan_error(filename) - - assert "Virus scan error:" in str(e.value) - mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.ERROR) - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE - - -def test_replay_letters_in_error_for_all_letters_in_error_bucket(notify_api, mocker): - mockObject = boto3.resource("s3").Object("ERROR", "ERROR/file_name") - mocker.patch( - "app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", - return_value=[mockObject], - ) - mock_move = 
mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket") - mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task") - replay_letters_in_error() - mock_move.assert_called_once_with("file_name") - mock_celery.assert_called_once_with(name="scan-file", kwargs={"filename": "file_name"}, queue="antivirus-tasks") - - -def test_replay_letters_in_error_for_one_file(notify_api, mocker): - mockObject = boto3.resource("s3").Object("ERROR", "ERROR/file_name") - mocker.patch( - "app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", - return_value=[mockObject], - ) - mock_move = mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket") - mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task") - replay_letters_in_error("file_name") - mock_move.assert_called_once_with("file_name") - mock_celery.assert_called_once_with(name="scan-file", kwargs={"filename": "file_name"}, queue="antivirus-tasks") - - -def test_sanitise_precompiled_pdf_returns_data_from_template_preview(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"new_pdf").decode("utf-8"), - "validation_passed": True, - "errors": { - "content_outside_of_printable_area": [], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=200, - ) - mock_celery = Mock(**{"retry.side_effect": Retry}) - response = _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - assert rmock.called - assert rmock.request_history[0].url == endpoint - - assert base64.b64decode(response.json()["file"].encode()) == b"new_pdf" - assert rmock.last_request.text == "old_pdf" - - -def test_sanitise_precompiled_pdf_returns_none_on_validation_error(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - - endpoint = "http://localhost:9999/precompiled/sanitise" - with requests_mock.mock() as rmock: - rmock.request( - "POST", - endpoint, - json={ - "file": base64.b64encode(b"nyan").decode("utf-8"), - "validation_passed": False, - "errors": { - "content_outside_of_printable_area": [1], - "document_not_a4_size_portrait_orientation": [], - }, - }, - status_code=400, - ) - mock_celery = Mock(**{"retry.side_effect": Retry}) - response = _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - assert rmock.called - assert rmock.request_history[0].url == endpoint - - assert response is None - - -def test_sanitise_precompiled_pdf_passes_the_service_id_and_notification_id_to_template_preview( - mocker, - sample_letter_notification, -): - tp_mock = mocker.patch("app.celery.letters_pdf_tasks.requests_post") - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - mock_celery = Mock(**{"retry.side_effect": Retry}) - _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - - service_id = str(sample_letter_notification.service_id) - notification_id = str(sample_letter_notification.id) - - tp_mock.assert_called_once_with( - "http://localhost:9999/precompiled/sanitise", - data=b"old_pdf", - headers={ - "Authorization": "Token my-secret-key", - "Service-ID": service_id, - "Notification-ID": notification_id, - }, - ) - - -def test_sanitise_precompiled_pdf_retries_on_http_error(rmock, sample_letter_notification): - 
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - rmock.post( - "http://localhost:9999/precompiled/sanitise", - content=b"new_pdf", - status_code=500, - ) - mock_celery = Mock(**{"retry.side_effect": Retry}) - - with pytest.raises(Retry): - _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - - -def test_sanitise_precompiled_pdf_sets_notification_to_technical_failure_after_too_many_errors(rmock, sample_letter_notification): - sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK - rmock.post( - "http://localhost:9999/precompiled/sanitise", - content=b"new_pdf", - status_code=500, - ) - mock_celery = Mock(**{"retry.side_effect": MaxRetriesExceededError}) - - with pytest.raises(MaxRetriesExceededError): - _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b"old_pdf") - - assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py index 6222dfb3ff..7de3d47b74 100644 --- a/tests/app/celery/test_nightly_tasks.py +++ b/tests/app/celery/test_nightly_tasks.py @@ -17,8 +17,8 @@ delete_sms_notifications_older_than_retention, letter_raise_alert_if_no_ack_file_for_zip, raise_alert_if_letter_notifications_still_sending, - remove_letter_csv_files, - remove_sms_email_csv_files, + remove_letter_jobs, + remove_sms_email_jobs, remove_transformed_dvla_files, s3, send_daily_performance_platform_stats, @@ -72,11 +72,11 @@ def mock_s3_get_list_diff(bucket_name, subfolder="", suffix="", last_modified=No @freeze_time("2016-10-18T10:00:00") -def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_db_session, mocker, sample_template): +def test_will_archive_jobs_older_than_seven_days(notify_db, notify_db_session, mocker, sample_template): """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_jobs_from_s3") seven_days_ago = datetime.utcnow() - timedelta(days=7) just_under_seven_days = seven_days_ago + timedelta(seconds=1) @@ -91,22 +91,20 @@ def test_will_remove_csv_files_for_jobs_older_than_seven_days(notify_db, notify_ dont_delete_me_1 = create_job(sample_template, created_at=seven_days_ago) create_job(sample_template, created_at=just_under_seven_days) - remove_sms_email_csv_files() + remove_sms_email_jobs() - assert s3.remove_job_from_s3.call_args_list == [ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - ] + args = s3.remove_jobs_from_s3.call_args.args[0] + assert sorted(args, key=lambda x: x.id) == sorted([job1_to_delete, job2_to_delete], key=lambda x: x.id) assert job1_to_delete.archived is True assert dont_delete_me_1.archived is False @freeze_time("2016-10-18T10:00:00") -def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, notify_db_session, mocker): +def test_will_archive_jobs_older_than_retention_period(notify_db, notify_db_session, mocker): """ Jobs older than retention period are deleted, but only two day's worth (two-day window) """ - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") + mocker.patch("app.celery.nightly_tasks.s3.remove_jobs_from_s3") service_1 = create_service(service_name="service 1") service_2 = create_service(service_name="service 2") create_service_data_retention(service=service_1, notification_type=SMS_TYPE, days_of_retention=3) 
@@ -129,22 +127,17 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period(notify_db, n job3_to_delete = create_job(email_template_service_2, created_at=thirty_one_days_ago) job4_to_delete = create_job(sms_template_service_2, created_at=eight_days_ago) - remove_sms_email_csv_files() + remove_sms_email_jobs() - s3.remove_job_from_s3.assert_has_calls( - [ - call(job1_to_delete.service_id, job1_to_delete.id), - call(job2_to_delete.service_id, job2_to_delete.id), - call(job3_to_delete.service_id, job3_to_delete.id), - call(job4_to_delete.service_id, job4_to_delete.id), - ], - any_order=True, + args = s3.remove_jobs_from_s3.call_args.args[0] + assert sorted(args, key=lambda x: x.id) == sorted( + [job1_to_delete, job2_to_delete, job3_to_delete, job4_to_delete], key=lambda x: x.id ) @freeze_time("2017-01-01 10:00:00") -def test_remove_csv_files_filters_by_type(mocker, sample_service): - mocker.patch("app.celery.nightly_tasks.s3.remove_job_from_s3") +def test_archive_jobs_by_type(mocker, sample_service): + mocker.patch("app.celery.nightly_tasks.s3.remove_jobs_from_s3") """ Jobs older than seven days are deleted, but only two day's worth (two-day window) """ @@ -156,11 +149,9 @@ def test_remove_csv_files_filters_by_type(mocker, sample_service): job_to_delete = create_job(template=letter_template, created_at=eight_days_ago) create_job(template=sms_template, created_at=eight_days_ago) - remove_letter_csv_files() + remove_letter_jobs() - assert s3.remove_job_from_s3.call_args_list == [ - call(job_to_delete.service_id, job_to_delete.id), - ] + assert s3.remove_jobs_from_s3.call_args.args[0] == [job_to_delete] def test_should_call_delete_sms_notifications_more_than_week_in_task(notify_api, mocker): @@ -252,8 +243,8 @@ def test_timeout_notifications_sends_status_update_to_service(client, sample_tem ) timeout_notifications() - encrypted_data = create_delivery_status_callback_data(notification, callback_api) - mocked.assert_called_once_with([str(notification.id), encrypted_data], queue=QueueNames.CALLBACKS) + signed_data = create_delivery_status_callback_data(notification, callback_api) + mocked.assert_called_once_with([str(notification.id), signed_data], queue=QueueNames.CALLBACKS) def test_send_daily_performance_stats_calls_does_not_send_if_inactive(client, mocker): diff --git a/tests/app/celery/test_process_pinpoint_receipts_tasks.py b/tests/app/celery/test_process_pinpoint_receipts_tasks.py new file mode 100644 index 0000000000..ea9bfc0654 --- /dev/null +++ b/tests/app/celery/test_process_pinpoint_receipts_tasks.py @@ -0,0 +1,242 @@ +from datetime import datetime + +import pytest +from freezegun import freeze_time + +from app import statsd_client +from app.aws.mocks import ( + pinpoint_delivered_callback, + pinpoint_failed_callback, + pinpoint_shortcode_delivered_callback, + pinpoint_successful_callback, +) +from app.celery.process_pinpoint_receipts_tasks import process_pinpoint_results +from app.dao.notifications_dao import get_notification_by_id +from app.models import ( + NOTIFICATION_DELIVERED, + NOTIFICATION_PERMANENT_FAILURE, + NOTIFICATION_SENT, + NOTIFICATION_TECHNICAL_FAILURE, + NOTIFICATION_TEMPORARY_FAILURE, +) +from app.notifications.callbacks import create_delivery_status_callback_data +from celery.exceptions import MaxRetriesExceededError +from tests.app.conftest import create_sample_notification +from tests.app.db import ( + create_notification, + create_service_callback_api, + save_notification, +) + + +@pytest.mark.parametrize( + "callback, expected_response", + [ + 
(pinpoint_delivered_callback, "Message has been accepted by phone"), + (pinpoint_shortcode_delivered_callback, "Message has been accepted by phone carrier"), + ], +) +def test_process_pinpoint_results_delivered(sample_template, notify_db, notify_db_session, callback, expected_response, mocker): + mock_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.info") + mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") + + notification = create_sample_notification( + notify_db, + notify_db_session, + template=sample_template, + reference="ref", + status=NOTIFICATION_SENT, + sent_by="pinpoint", + sent_at=datetime.utcnow(), + ) + assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT + + process_pinpoint_results(callback(reference="ref")) + + assert mock_callback_task.called_once_with(get_notification_by_id(notification.id)) + assert get_notification_by_id(notification.id).status == NOTIFICATION_DELIVERED + assert get_notification_by_id(notification.id).provider_response == expected_response + + mock_logger.assert_called_once_with(f"Pinpoint callback return status of delivered for notification: {notification.id}") + + +def test_process_pinpoint_results_succeeded(sample_template, notify_db, notify_db_session, mocker): + mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") + + notification = create_sample_notification( + notify_db, + notify_db_session, + template=sample_template, + reference="ref", + status=NOTIFICATION_SENT, + sent_by="pinpoint", + sent_at=datetime.utcnow(), + ) + assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT + + process_pinpoint_results(pinpoint_successful_callback(reference="ref")) + + assert mock_callback_task.not_called() + assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT + assert get_notification_by_id(notification.id).provider_response is None + + +@pytest.mark.parametrize( + "provider_response, expected_status, should_log_warning, should_save_provider_response", + [ + ( + "Blocked as spam by phone carrier", + NOTIFICATION_TECHNICAL_FAILURE, + False, + True, + ), + ( + "Phone carrier is currently unreachable/unavailable", + NOTIFICATION_TEMPORARY_FAILURE, + False, + True, + ), + ( + "Phone is currently unreachable/unavailable", + NOTIFICATION_PERMANENT_FAILURE, + False, + True, + ), + ("This is not a real response", NOTIFICATION_TECHNICAL_FAILURE, True, True), + ], +) +def test_process_pinpoint_results_failed( + sample_template, + notify_db, + notify_db_session, + mocker, + provider_response, + expected_status, + should_log_warning, + should_save_provider_response, +): + mock_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.info") + mock_warning_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.warning") + mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") + + notification = create_sample_notification( + notify_db, + notify_db_session, + template=sample_template, + reference="ref", + status=NOTIFICATION_SENT, + sent_by="pinpoint", + sent_at=datetime.utcnow(), + ) + assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT + process_pinpoint_results(pinpoint_failed_callback(provider_response=provider_response, reference="ref")) + + assert mock_callback_task.called_once_with(get_notification_by_id(notification.id)) + assert 
get_notification_by_id(notification.id).status == expected_status + + if should_save_provider_response: + assert get_notification_by_id(notification.id).provider_response == provider_response + else: + assert get_notification_by_id(notification.id).provider_response is None + + mock_logger.assert_called_once_with( + ( + f"Pinpoint delivery failed: notification id {notification.id} and reference ref has error found. " + f"Provider response: {provider_response}" + ) + ) + + assert mock_warning_logger.call_count == int(should_log_warning) + + +def test_pinpoint_callback_should_retry_if_notification_is_missing(notify_db, mocker): + mock_retry = mocker.patch("app.celery.process_pinpoint_receipts_tasks.process_pinpoint_results.retry") + mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") + + process_pinpoint_results(pinpoint_delivered_callback(reference="ref")) + + mock_callback_task.assert_not_called() + assert mock_retry.call_count == 1 + + +def test_pinpoint_callback_should_give_up_after_max_tries(notify_db, mocker): + mocker.patch( + "app.celery.process_pinpoint_receipts_tasks.process_pinpoint_results.retry", + side_effect=MaxRetriesExceededError, + ) + mock_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.warning") + mock_callback_task = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") + + process_pinpoint_results(pinpoint_delivered_callback(reference="ref")) is None + mock_callback_task.assert_not_called() + + mock_logger.assert_called_with("notification not found for Pinpoint reference: ref (update to delivered). Giving up.") + + +def test_process_pinpoint_results_retry_called(sample_template, mocker): + save_notification( + create_notification( + sample_template, + reference="ref1", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="pinpoint", + ) + ) + + mocker.patch( + "app.dao.notifications_dao._update_notification_status", + side_effect=Exception("EXPECTED"), + ) + mocked = mocker.patch("app.celery.process_pinpoint_receipts_tasks.process_pinpoint_results.retry") + process_pinpoint_results(response=pinpoint_delivered_callback(reference="ref1")) + assert mocked.call_count == 1 + + +def test_process_pinpoint_results_does_not_process_other_providers(sample_template, mocker): + mock_logger = mocker.patch("app.celery.process_pinpoint_receipts_tasks.current_app.logger.exception") + mock_dao = mocker.patch("app.dao.notifications_dao._update_notification_status") + save_notification( + create_notification( + sample_template, + reference="ref1", + sent_at=datetime.utcnow(), + status=NOTIFICATION_SENT, + sent_by="sns", + ) + ) + + process_pinpoint_results(response=pinpoint_delivered_callback(reference="ref1")) is None + assert mock_logger.called_once_with("") + assert not mock_dao.called + + +def test_process_pinpoint_results_calls_service_callback(sample_template, notify_db_session, notify_db, mocker): + with freeze_time("2021-01-01T12:00:00"): + mocker.patch("app.statsd_client.incr") + mocker.patch("app.statsd_client.timing_with_dates") + mock_send_status = mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async") + mock_callback = mocker.patch("app.notifications.callbacks._check_and_queue_callback_task") + + notification = create_sample_notification( + notify_db, + notify_db_session, + template=sample_template, + reference="ref", + status=NOTIFICATION_SENT, + sent_by="pinpoint", + sent_at=datetime.utcnow(), + ) + callback_api = 
create_service_callback_api(service=sample_template.service, url="https://example.com") + assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT + + process_pinpoint_results(pinpoint_delivered_callback(reference="ref")) + + assert mock_callback.called_once_with(get_notification_by_id(notification.id)) + assert get_notification_by_id(notification.id).status == NOTIFICATION_DELIVERED + assert get_notification_by_id(notification.id).provider_response == "Message has been accepted by phone" + statsd_client.timing_with_dates.assert_any_call("callback.pinpoint.elapsed-time", datetime.utcnow(), notification.sent_at) + statsd_client.incr.assert_any_call("callback.pinpoint.delivered") + updated_notification = get_notification_by_id(notification.id) + signed_data = create_delivery_status_callback_data(updated_notification, callback_api) + mock_send_status.assert_called_once_with([str(notification.id), signed_data], queue="service-callbacks") diff --git a/tests/app/celery/test_process_ses_receipts_tasks.py b/tests/app/celery/test_process_ses_receipts_tasks.py index 22317f2187..706e09424c 100644 --- a/tests/app/celery/test_process_ses_receipts_tasks.py +++ b/tests/app/celery/test_process_ses_receipts_tasks.py @@ -4,7 +4,7 @@ import pytest from freezegun import freeze_time -from app import encryption, statsd_client +from app import bounce_rate_client, signer_complaint, statsd_client from app.aws.mocks import ses_complaint_callback from app.celery.process_ses_receipts_tasks import process_ses_results from app.celery.research_mode_tasks import ( @@ -13,13 +13,29 @@ ses_soft_bounce_callback, ) from app.dao.notifications_dao import get_notification_by_id -from app.models import Complaint, Notification +from app.models import ( + NOTIFICATION_HARD_BOUNCE, + NOTIFICATION_HARD_GENERAL, + NOTIFICATION_HARD_NOEMAIL, + NOTIFICATION_HARD_ONACCOUNTSUPPRESSIONLIST, + NOTIFICATION_HARD_SUPPRESSED, + NOTIFICATION_PERMANENT_FAILURE, + NOTIFICATION_SOFT_ATTACHMENTREJECTED, + NOTIFICATION_SOFT_BOUNCE, + NOTIFICATION_SOFT_CONTENTREJECTED, + NOTIFICATION_SOFT_GENERAL, + NOTIFICATION_SOFT_MAILBOXFULL, + NOTIFICATION_SOFT_MESSAGETOOLARGE, + Complaint, + Notification, +) from app.notifications.callbacks import create_delivery_status_callback_data from app.notifications.notifications_ses_callback import ( remove_emails_from_bounce, remove_emails_from_complaint, ) -from tests.app.conftest import sample_notification as create_sample_notification +from celery.exceptions import MaxRetriesExceededError +from tests.app.conftest import create_sample_notification from tests.app.db import ( create_notification, create_service_callback_api, @@ -108,6 +124,25 @@ def test_ses_callback_should_update_notification_status(notify_db, notify_db_ses send_mock.assert_called_once_with([str(notification.id), encrypted_data], queue="service-callbacks") +def test_ses_callback_dont_change_hard_bounce_status(sample_template, mocker): + with freeze_time("2001-01-01T12:00:00"): + mocker.patch("app.statsd_client.incr") + mocker.patch("app.statsd_client.timing_with_dates") + mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async") + notification = save_notification( + create_notification( + sample_template, + status=NOTIFICATION_PERMANENT_FAILURE, + reference="ref", + ) + ) + notification = get_notification_by_id(notification.id) + assert notification.status == NOTIFICATION_PERMANENT_FAILURE + assert process_ses_results(ses_notification_callback(reference="ref")) + notification = 
get_notification_by_id(notification.id) + assert notification.status == NOTIFICATION_PERMANENT_FAILURE + + def test_ses_callback_should_update_notification_status_when_receiving_new_delivery_receipt(sample_email_template, mocker): notification = save_notification(create_notification(template=sample_email_template, reference="ref", status="delivered")) @@ -125,31 +160,42 @@ def test_ses_callback_should_retry_if_notification_is_new(notify_db, mocker): assert mock_retry.call_count == 1 -def test_ses_callback_should_log_if_notification_is_missing(notify_db, mocker): - mock_retry = mocker.patch("app.celery.process_ses_receipts_tasks.process_ses_results.retry") - mock_logger = mocker.patch("app.celery.process_ses_receipts_tasks.current_app.logger.warning") +def test_process_ses_receipts_tasks_exception_handling(notify_db, mocker): + reference = "test_reference" + mocker.patch("app.celery.process_ses_receipts_tasks.process_ses_results.retry", side_effect=MaxRetriesExceededError()) + mock_warning = mocker.patch("app.celery.process_ses_receipts_tasks.current_app.logger.error") - with freeze_time("2017-11-17T12:34:03.646Z"): - assert process_ses_results(ses_notification_callback(reference="ref")) is None - assert mock_retry.call_count == 0 - mock_logger.assert_called_once_with("notification not found for reference: ref (update to delivered)") + with pytest.raises(Exception): + process_ses_results(ses_notification_callback(reference)) + assert mock_warning.call_count == 2 + assert "RETRY 5: notification not found for SES reference test_reference." in mock_warning.call_args_list[0][0][0] + assert ( + "notification not found for SES reference: test_reference. Error has persisted > number of retries. Giving up." + in mock_warning.call_args_list[1][0][0] + ) -def test_ses_callback_should_not_retry_if_notification_is_old(notify_db, mocker): +def test_ses_callback_should_retry_if_notification_is_missing(notify_db, mocker): mock_retry = mocker.patch("app.celery.process_ses_receipts_tasks.process_ses_results.retry") - mock_logger = mocker.patch("app.celery.process_ses_receipts_tasks.current_app.logger.error") + assert process_ses_results(ses_notification_callback(reference="ref")) is None + assert mock_retry.call_count == 1 - with freeze_time("2017-11-21T12:14:03.646Z"): - assert process_ses_results(ses_notification_callback(reference="ref")) is None - assert mock_logger.call_count == 0 - assert mock_retry.call_count == 0 + +def test_ses_callback_should_give_up_after_max_tries(notify_db, mocker): + mocker.patch( + "app.celery.process_ses_receipts_tasks.process_ses_results.retry", + side_effect=MaxRetriesExceededError, + ) + mock_logger = mocker.patch("app.celery.process_ses_receipts_tasks.current_app.logger.warning") + + assert process_ses_results(ses_notification_callback(reference="ref")) is None + mock_logger.assert_called_with("notification not found for SES reference: ref. 
Giving up.") def test_ses_callback_does_not_call_send_delivery_status_if_no_db_entry( notify_db, notify_db_session, sample_email_template, mocker ): with freeze_time("2001-01-01T12:00:00"): - send_mock = mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async") notification = create_sample_notification( notify_db, @@ -253,12 +299,7 @@ def test_ses_callback_should_set_status_to_temporary_failure( ], ) def test_ses_callback_should_set_status_to_permanent_failure( - notify_db, - notify_db_session, - sample_email_template, - mocker, - bounce_subtype, - provider_response, + notify_db, notify_db_session, sample_email_template, mocker, bounce_subtype, provider_response ): send_mock = mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async") notification = create_sample_notification( @@ -300,7 +341,7 @@ def test_ses_callback_should_send_on_complaint_to_user_callback_api(sample_email assert process_ses_results(response) assert send_mock.call_count == 1 - assert encryption.decrypt(send_mock.call_args[0][0][0]) == { + assert signer_complaint.verify(send_mock.call_args[0][0][0]) == { "complaint_date": "2018-06-05T13:59:58.000000Z", "complaint_id": str(Complaint.query.one().id), "notification_id": str(notification.id), @@ -309,3 +350,87 @@ def test_ses_callback_should_send_on_complaint_to_user_callback_api(sample_email "service_callback_api_url": "https://original_url.com", "to": "recipient1@example.com", } + + +class TestBounceRates: + @pytest.mark.parametrize( + "bounce_subtype, expected_subtype", + [ + ("General", NOTIFICATION_HARD_GENERAL), + ("NoEmail", NOTIFICATION_HARD_NOEMAIL), + ("Suppressed", NOTIFICATION_HARD_SUPPRESSED), + ("OnAccountSuppressionList", NOTIFICATION_HARD_ONACCOUNTSUPPRESSIONLIST), + ], + ) + def test_ses_callback_should_update_bounce_info_new_delivery_receipt_hard_bounce( + self, sample_email_template, mocker, bounce_subtype, expected_subtype + ): + notification = save_notification(create_notification(template=sample_email_template, reference="ref", status="delivered")) + + assert process_ses_results(ses_hard_bounce_callback(reference="ref", bounce_subtype=bounce_subtype)) + assert get_notification_by_id(notification.id).feedback_type == NOTIFICATION_HARD_BOUNCE + assert get_notification_by_id(notification.id).feedback_subtype == expected_subtype + + @pytest.mark.parametrize( + "bounce_subtype, expected_subtype", + [ + ("General", NOTIFICATION_SOFT_GENERAL), + ("MailboxFull", NOTIFICATION_SOFT_MAILBOXFULL), + ("MessageTooLarge", NOTIFICATION_SOFT_MESSAGETOOLARGE), + ("ContentRejected", NOTIFICATION_SOFT_CONTENTREJECTED), + ("AttachmentRejected", NOTIFICATION_SOFT_ATTACHMENTREJECTED), + ], + ) + def test_ses_callback_should_update_bounce_info_new_delivery_receipt_soft_bounce( + self, sample_email_template, mocker, bounce_subtype, expected_subtype + ): + notification = save_notification(create_notification(template=sample_email_template, reference="ref", status="delivered")) + + assert process_ses_results(ses_soft_bounce_callback(reference="ref", bounce_subtype=bounce_subtype)) + assert get_notification_by_id(notification.id).feedback_type == NOTIFICATION_SOFT_BOUNCE + assert get_notification_by_id(notification.id).feedback_subtype == expected_subtype + + @pytest.mark.parametrize( + "bounce_subtype, expected_subtype", + [ + ("General", NOTIFICATION_HARD_GENERAL), + ("NoEmail", NOTIFICATION_HARD_NOEMAIL), + ("Suppressed", NOTIFICATION_HARD_SUPPRESSED), + ("OnAccountSuppressionList", 
NOTIFICATION_HARD_ONACCOUNTSUPPRESSIONLIST), + ], + ) + def test_ses_callback_should_add_redis_key_when_delivery_receipt_is_hard_bounce( + self, sample_email_template, mocker, bounce_subtype, expected_subtype + ): + mocker.patch("app.bounce_rate_client.set_sliding_hard_bounce") + mocker.patch("app.bounce_rate_client.set_sliding_notifications") + + notification = save_notification(create_notification(template=sample_email_template, reference="ref", status="delivered")) + + assert process_ses_results(ses_hard_bounce_callback(reference="ref", bounce_subtype=bounce_subtype)) + + bounce_rate_client.set_sliding_hard_bounce.assert_called_with(notification.service_id, str(notification.id)) + bounce_rate_client.set_sliding_notifications.assert_not_called() + + @pytest.mark.parametrize( + "bounce_subtype, expected_subtype", + [ + ("General", NOTIFICATION_SOFT_GENERAL), + ("MailboxFull", NOTIFICATION_SOFT_MAILBOXFULL), + ("MessageTooLarge", NOTIFICATION_SOFT_MESSAGETOOLARGE), + ("ContentRejected", NOTIFICATION_SOFT_CONTENTREJECTED), + ("AttachmentRejected", NOTIFICATION_SOFT_ATTACHMENTREJECTED), + ], + ) + def test_ses_callback_should_not_add_redis_keys_when_delivery_receipt_is_soft_bounce( + self, sample_email_template, mocker, bounce_subtype, expected_subtype + ): + mocker.patch("app.bounce_rate_client.set_sliding_hard_bounce") + mocker.patch("app.bounce_rate_client.set_sliding_notifications") + + save_notification(create_notification(template=sample_email_template, reference="ref", status="delivered")) + + assert process_ses_results(ses_soft_bounce_callback(reference="ref", bounce_subtype=bounce_subtype)) + + bounce_rate_client.set_sliding_hard_bounce.assert_not_called() + bounce_rate_client.set_sliding_notifications.assert_not_called() diff --git a/tests/app/celery/test_process_sns_receipts_tasks.py b/tests/app/celery/test_process_sns_receipts_tasks.py index 86e0684e6d..5da017cf67 100644 --- a/tests/app/celery/test_process_sns_receipts_tasks.py +++ b/tests/app/celery/test_process_sns_receipts_tasks.py @@ -15,7 +15,8 @@ NOTIFICATION_TEMPORARY_FAILURE, ) from app.notifications.callbacks import create_delivery_status_callback_data -from tests.app.conftest import sample_notification as create_sample_notification +from celery.exceptions import MaxRetriesExceededError +from tests.app.conftest import create_sample_notification from tests.app.db import ( create_notification, create_service_callback_api, @@ -38,7 +39,7 @@ def test_process_sns_results_delivered(sample_template, notify_db, notify_db_ses assert get_notification_by_id(notification.id).status == NOTIFICATION_SENT assert process_sns_results(sns_success_callback(reference="ref")) assert get_notification_by_id(notification.id).status == NOTIFICATION_DELIVERED - assert get_notification_by_id(notification.id).provider_response is None + assert get_notification_by_id(notification.id).provider_response == "Message has been accepted by phone carrier" mock_logger.assert_called_once_with(f"SNS callback return status of delivered for notification: {notification.id}") @@ -56,15 +57,15 @@ def test_process_sns_results_delivered(sample_template, notify_db, notify_db_ses "Phone carrier is currently unreachable/unavailable", NOTIFICATION_TEMPORARY_FAILURE, False, - False, + True, ), ( "Phone is currently unreachable/unavailable", NOTIFICATION_PERMANENT_FAILURE, False, - False, + True, ), - ("This is not a real response", NOTIFICATION_TECHNICAL_FAILURE, True, False), + ("This is not a real response", NOTIFICATION_TECHNICAL_FAILURE, True, True), ], ) def 
test_process_sns_results_failed( @@ -108,34 +109,21 @@ def test_process_sns_results_failed( assert mock_warning_logger.call_count == int(should_log_warning) -def test_sns_callback_should_retry_if_notification_is_new(mocker): +def test_sns_callback_should_retry_if_notification_is_missing(notify_db, mocker): mock_retry = mocker.patch("app.celery.process_sns_receipts_tasks.process_sns_results.retry") - mock_logger = mocker.patch("app.celery.process_sns_receipts_tasks.current_app.logger.error") + assert process_sns_results(sns_success_callback(reference="ref")) is None + assert mock_retry.call_count == 1 - with freeze_time("2017-11-17T12:14:03.646Z"): - assert process_sns_results(sns_success_callback(reference="ref", timestamp="2017-11-17T12:14:02.000Z")) is None - assert mock_logger.call_count == 0 - assert mock_retry.call_count == 1 - -def test_sns_callback_should_log_if_notification_is_missing(mocker): - mock_retry = mocker.patch("app.celery.process_sns_receipts_tasks.process_sns_results.retry") +def test_sns_callback_should_give_up_after_max_tries(notify_db, mocker): + mocker.patch( + "app.celery.process_sns_receipts_tasks.process_sns_results.retry", + side_effect=MaxRetriesExceededError, + ) mock_logger = mocker.patch("app.celery.process_sns_receipts_tasks.current_app.logger.warning") - with freeze_time("2017-11-17T12:34:03.646Z"): - assert process_sns_results(sns_success_callback(reference="ref")) is None - assert mock_retry.call_count == 0 - mock_logger.assert_called_once_with("notification not found for reference: ref (update to delivered)") - - -def test_sns_callback_should_not_retry_if_notification_is_old(client, notify_db, mocker): - mock_retry = mocker.patch("app.celery.process_sns_receipts_tasks.process_sns_results.retry") - mock_logger = mocker.patch("app.celery.process_sns_receipts_tasks.current_app.logger.error") - - with freeze_time("2017-11-17T12:16:00.000Z"): # 6 minutes apart and max is 5 minutes - assert process_sns_results(sns_success_callback(reference="ref", timestamp="2017-11-17T12:10:00.000Z")) is None - assert mock_logger.call_count == 0 - assert mock_retry.call_count == 0 + assert process_sns_results(sns_success_callback(reference="ref")) is None + mock_logger.assert_called_with("notification not found for SNS reference: ref (update to delivered). 
Giving up.") def test_process_sns_results_retry_called(sample_template, mocker): @@ -195,9 +183,9 @@ def test_process_sns_results_calls_service_callback(sample_template, notify_db_s assert process_sns_results(sns_success_callback(reference="ref")) assert get_notification_by_id(notification.id).status == NOTIFICATION_DELIVERED - assert get_notification_by_id(notification.id).provider_response is None + assert get_notification_by_id(notification.id).provider_response == "Message has been accepted by phone carrier" statsd_client.timing_with_dates.assert_any_call("callback.sns.elapsed-time", datetime.utcnow(), notification.sent_at) statsd_client.incr.assert_any_call("callback.sns.delivered") updated_notification = get_notification_by_id(notification.id) - encrypted_data = create_delivery_status_callback_data(updated_notification, callback_api) - send_mock.assert_called_once_with([str(notification.id), encrypted_data], queue="service-callbacks") + signed_data = create_delivery_status_callback_data(updated_notification, callback_api) + send_mock.assert_called_once_with([str(notification.id), signed_data], queue="service-callbacks") diff --git a/tests/app/celery/test_provider_tasks.py b/tests/app/celery/test_provider_tasks.py index aae4707941..e79d8176d3 100644 --- a/tests/app/celery/test_provider_tasks.py +++ b/tests/app/celery/test_provider_tasks.py @@ -64,7 +64,7 @@ def test_should_add_to_retry_queue_if_notification_not_found_in_deliver_sms_task sms_method(notification_id) app.delivery.send_to_providers.send_sms_to_provider.assert_not_called() - getattr(app.celery.provider_tasks, sms_method_name).retry.assert_called_with(queue="retry-tasks", countdown=0) + getattr(app.celery.provider_tasks, sms_method_name).retry.assert_called_with(queue="retry-tasks", countdown=25) def test_should_call_send_email_to_provider_from_deliver_email_task( @@ -87,7 +87,7 @@ def test_should_add_to_retry_queue_if_notification_not_found_in_deliver_email_ta deliver_email(notification_id) app.delivery.send_to_providers.send_email_to_provider.assert_not_called() - app.celery.provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks") + app.celery.provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks", countdown=25) # DO THESE FOR THE 4 TYPES OF TASK @@ -114,7 +114,7 @@ def test_should_go_into_technical_error_if_exceeds_retries_on_deliver_sms_task( sms_method(sample_notification.id) assert str(sample_notification.id) in str(e.value) - getattr(provider_tasks, sms_method_name).retry.assert_called_with(queue="retry-tasks", countdown=0) + getattr(provider_tasks, sms_method_name).retry.assert_called_with(queue="retry-tasks", countdown=300) assert sample_notification.status == "technical-failure" queued_callback.assert_called_once_with(sample_notification) @@ -135,7 +135,7 @@ def test_should_go_into_technical_error_if_exceeds_retries_on_deliver_email_task deliver_email(sample_notification.id) assert str(sample_notification.id) in str(e.value) - provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks") + provider_tasks.deliver_email.retry.assert_called_with(queue="retry-tasks", countdown=300) assert sample_notification.status == "technical-failure" queued_callback.assert_called_once_with(sample_notification) diff --git a/tests/app/celery/test_reporting_tasks.py b/tests/app/celery/test_reporting_tasks.py index a9f04cf39e..8dfd2f956b 100644 --- a/tests/app/celery/test_reporting_tasks.py +++ b/tests/app/celery/test_reporting_tasks.py @@ -23,6 +23,7 @@ from tests.app.db import ( 
create_letter_rate, create_notification, + create_notification_history, create_rate, create_service, create_template, @@ -94,7 +95,6 @@ def test_create_nightly_billing_for_day_sms_rate_multiplier( billable_units, multiplier, ): - yesterday = convert_utc_to_local_timezone((datetime.now() - timedelta(days=1))).replace(hour=12, minute=00) mocker.patch("app.dao.fact_billing_dao.get_rate", side_effect=mocker_get_rate) @@ -358,7 +358,6 @@ def test_get_rate_for_sms_and_email(notify_db_session): @freeze_time("2018-03-30T05:00:00") # summer time starts on 2018-03-25 def test_create_nightly_billing_for_day_use_BST(sample_service, sample_template, mocker): - mocker.patch("app.dao.fact_billing_dao.get_rate", side_effect=mocker_get_rate) # too late @@ -407,7 +406,6 @@ def test_create_nightly_billing_for_day_use_BST(sample_service, sample_template, @freeze_time("2018-01-15T03:30:00") @pytest.mark.skip(reason="Not in use") def test_create_nightly_billing_for_day_update_when_record_exists(sample_service, sample_template, mocker): - mocker.patch("app.dao.fact_billing_dao.get_rate", side_effect=mocker_get_rate) save_notification( @@ -479,12 +477,13 @@ def test_create_nightly_notification_status_for_day(notify_db_session): ) ) - save_notification(create_notification(template=third_template, status="created")) + save_notification(create_notification(template=third_template, status="created", billable_units=100)) save_notification( create_notification( template=third_template, status="created", created_at=datetime(2019, 1, 1, 12, 0), + billable_units=100, ) ) @@ -498,6 +497,64 @@ def test_create_nightly_notification_status_for_day(notify_db_session): assert new_data[0].bst_date == date(2019, 1, 1) assert new_data[1].bst_date == date(2019, 1, 1) assert new_data[2].bst_date == date(2019, 1, 1) + assert new_data[2].billable_units == 100 + + +@freeze_time("2019-01-05") +def test_ensure_create_nightly_notification_status_for_day_copies_billable_units(notify_db_session): + first_service = create_service(service_name="First Service") + first_template = create_template(service=first_service) + second_service = create_service(service_name="second Service") + second_template = create_template(service=second_service, template_type="email") + + save_notification( + create_notification( + template=first_template, + status="delivered", + created_at=datetime(2019, 1, 1, 12, 0), + billable_units=5, + ) + ) + + save_notification( + create_notification( + template=second_template, + status="temporary-failure", + created_at=datetime(2019, 1, 1, 12, 0), + billable_units=10, + ) + ) + + assert len(FactNotificationStatus.query.all()) == 0 + + create_nightly_notification_status_for_day("2019-01-01") + + new_data = FactNotificationStatus.query.all() + + assert len(new_data) == 2 + assert new_data[0].billable_units == 5 + assert new_data[1].billable_units == 10 + + +@freeze_time("2019-01-05T06:00:00") +def test_ensure_create_nightly_notification_status_for_day_copies_billable_units_from_notificationsHistory(notify_db_session): + first_service = create_service(service_name="First Service") + first_template = create_template(service=first_service) + second_service = create_service(service_name="second Service") + second_template = create_template(service=second_service, template_type="email") + + create_notification_history(template=first_template, billable_units=5) + create_notification_history(template=second_template, billable_units=10) + + assert len(FactNotificationStatus.query.all()) == 0 + + 
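The two new reporting tests assert that `billable_units` is carried into `FactNotificationStatus` whether the source row still lives in the notifications table or has already moved to notification history. As a rough, in-memory sketch only of the roll-up those assertions imply (the real task queries the database tables), the aggregation could look like this; the key fields and helper name are assumptions, not code from the diff.

```python
# Illustrative roll-up: billable_units are summed per (date, service, template,
# status) alongside the notification count. Attribute names mirror the tests;
# this operates on any iterable of notification-like rows.
from collections import defaultdict
from dataclasses import dataclass
from datetime import date


@dataclass(frozen=True)
class StatusKey:
    bst_date: date
    service_id: str
    template_id: str
    status: str


def roll_up_notification_status(rows):
    """rows: objects with created_at, service_id, template_id, status and
    billable_units attributes (e.g. notification or history rows)."""
    totals = defaultdict(lambda: {"notification_count": 0, "billable_units": 0})
    for row in rows:
        key = StatusKey(row.created_at.date(), row.service_id, row.template_id, row.status)
        totals[key]["notification_count"] += 1
        totals[key]["billable_units"] += row.billable_units or 0
    return totals
```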
create_nightly_notification_status_for_day("2019-01-05") + + new_data = FactNotificationStatus.query.all() + + assert len(new_data) == 2 + assert new_data[0].billable_units == 5 + assert new_data[1].billable_units == 10 # the job runs at 12:30am London time. 04/01 is in BST. diff --git a/tests/app/celery/test_research_mode_tasks.py b/tests/app/celery/test_research_mode_tasks.py index b5e02ab6d3..b667132f4b 100644 --- a/tests/app/celery/test_research_mode_tasks.py +++ b/tests/app/celery/test_research_mode_tasks.py @@ -8,6 +8,8 @@ from freezegun import freeze_time from app.aws.mocks import ( + pinpoint_delivered_callback, + pinpoint_failed_callback, ses_notification_callback, sns_failed_callback, sns_success_callback, @@ -50,6 +52,30 @@ def test_make_sns_success_callback(notify_api, mocker, phone_number, sns_callbac assert message_celery == sns_callback(**sns_callback_args) +@pytest.mark.parametrize( + "phone_number, pinpoint_callback, pinpoint_callback_args", + [ + ("+15149301630", pinpoint_delivered_callback, {}), + ("+15149301631", pinpoint_delivered_callback, {}), + ("+15149301632", pinpoint_failed_callback, {"provider_response": "Phone is currently unreachable/unavailable"}), + ("+15149301633", pinpoint_failed_callback, {"provider_response": "Phone carrier is currently unreachable/unavailable"}), + ], +) +@freeze_time("2018-01-25 14:00:30") +def test_make_pinpoint_success_callback(notify_api, mocker, phone_number, pinpoint_callback, pinpoint_callback_args): + mock_task = mocker.patch("app.celery.research_mode_tasks.process_pinpoint_results") + some_ref = str(uuid.uuid4()) + now = datetime.now() + timestamp = now.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + + send_sms_response("pinpoint", phone_number, some_ref) + + mock_task.apply_async.assert_called_once_with(ANY, queue=QueueNames.RESEARCH_MODE) + message_celery = mock_task.apply_async.call_args[0][0][0] + pinpoint_callback_args.update({"reference": some_ref, "destination": phone_number, "timestamp": timestamp}) + assert message_celery == pinpoint_callback(**pinpoint_callback_args) + + def test_make_ses_callback(notify_api, mocker): mock_task = mocker.patch("app.celery.research_mode_tasks.process_ses_results") some_ref = str(uuid.uuid4()) diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index fd74861c9b..cceace4f30 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -5,13 +5,21 @@ from freezegun import freeze_time from app import db -from app.celery import scheduled_tasks +from app.celery import scheduled_tasks, tasks from app.celery.scheduled_tasks import ( + beat_inbox_email_bulk, + beat_inbox_email_normal, + beat_inbox_email_priority, + beat_inbox_sms_bulk, + beat_inbox_sms_normal, + beat_inbox_sms_priority, check_job_status, check_precompiled_letter_state, check_templated_letter_state, delete_invitations, delete_verify_codes, + mark_jobs_complete, + recover_expired_notifications, replay_created_notifications, run_scheduled_jobs, send_scheduled_notifications, @@ -32,7 +40,7 @@ NOTIFICATION_PENDING_VIRUS_CHECK, ) from app.v2.errors import JobIncompleteError -from tests.app.conftest import sample_job as create_sample_job +from tests.app.conftest import create_sample_job from tests.app.db import ( create_job, create_notification, @@ -78,7 +86,7 @@ def test_should_call_delete_invotations_on_delete_invitations_task(notify_api, m def test_should_update_scheduled_jobs_and_put_on_queue(notify_db, notify_db_session, mocker): - mocked = 
mocker.patch("app.celery.tasks.process_job.apply_async") + mocked_process_job = mocker.patch("app.celery.tasks.process_job.apply_async") one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1) job = create_sample_job( @@ -92,11 +100,11 @@ def test_should_update_scheduled_jobs_and_put_on_queue(notify_db, notify_db_sess updated_job = dao_get_job_by_id(job.id) assert updated_job.job_status == "pending" - mocked.assert_called_with([str(job.id)], queue="job-tasks") + mocked_process_job.assert_called_with([str(job.id)], queue="job-tasks") def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_session, mocker): - mocked = mocker.patch("app.celery.tasks.process_job.apply_async") + mocked_process_job = mocker.patch("app.celery.tasks.process_job.apply_async") one_minute_in_the_past = datetime.utcnow() - timedelta(minutes=1) ten_minutes_in_the_past = datetime.utcnow() - timedelta(minutes=10) @@ -126,7 +134,7 @@ def test_should_update_all_scheduled_jobs_and_put_on_queue(notify_db, notify_db_ assert dao_get_job_by_id(job_2.id).job_status == "pending" assert dao_get_job_by_id(job_2.id).job_status == "pending" - mocked.assert_has_calls( + mocked_process_job.assert_has_calls( [ call([str(job_3.id)], queue="job-tasks"), call([str(job_2.id)], queue="job-tasks"), @@ -176,18 +184,18 @@ def test_should_send_all_scheduled_notifications_to_deliver_queue(sample_templat send_scheduled_notifications() - mocked.apply_async.assert_called_once_with([str(message_to_deliver.id)], queue="send-sms-tasks") + mocked.apply_async.assert_called_once_with([str(message_to_deliver.id)], queue=QueueNames.SEND_SMS_MEDIUM) scheduled_notifications = dao_get_scheduled_notifications() assert not scheduled_notifications def test_check_job_status_task_raises_job_incomplete_error(mocker, sample_template): mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task") + mocker.patch("app.celery.scheduled_tasks.update_in_progress_jobs") job = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(minutes=121), - processing_started=datetime.utcnow() - timedelta(minutes=121), + updated_at=datetime.utcnow() - timedelta(minutes=31), job_status=JOB_STATUS_IN_PROGRESS, ) save_notification(create_notification(template=sample_template, job=job)) @@ -204,12 +212,13 @@ def test_check_job_status_task_raises_job_incomplete_error(mocker, sample_templa def test_check_job_status_task_raises_job_incomplete_error_when_scheduled_job_is_not_complete(mocker, sample_template): mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task") + mocker.patch("app.celery.scheduled_tasks.update_in_progress_jobs") job = create_job( template=sample_template, notification_count=3, created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=121), - processing_started=datetime.utcnow() - timedelta(minutes=121), + scheduled_for=datetime.utcnow() - timedelta(minutes=31), + updated_at=datetime.utcnow() - timedelta(minutes=31), job_status=JOB_STATUS_IN_PROGRESS, ) with pytest.raises(expected_exception=JobIncompleteError) as e: @@ -225,20 +234,19 @@ def test_check_job_status_task_raises_job_incomplete_error_when_scheduled_job_is def test_check_job_status_task_raises_job_incomplete_error_for_multiple_jobs(mocker, sample_template): mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task") + mocker.patch("app.celery.scheduled_tasks.update_in_progress_jobs") job = create_job( template=sample_template, notification_count=3, 
created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=121), - processing_started=datetime.utcnow() - timedelta(minutes=121), + updated_at=datetime.utcnow() - timedelta(minutes=31), job_status=JOB_STATUS_IN_PROGRESS, ) job_2 = create_job( template=sample_template, notification_count=3, created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=121), - processing_started=datetime.utcnow() - timedelta(minutes=121), + updated_at=datetime.utcnow() - timedelta(minutes=31), job_status=JOB_STATUS_IN_PROGRESS, ) with pytest.raises(expected_exception=JobIncompleteError) as e: @@ -255,19 +263,18 @@ def test_check_job_status_task_raises_job_incomplete_error_for_multiple_jobs(moc def test_check_job_status_task_only_sends_old_tasks(mocker, sample_template): mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task") + mocker.patch("app.celery.scheduled_tasks.update_in_progress_jobs") job = create_job( template=sample_template, notification_count=3, created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=121), - processing_started=datetime.utcnow() - timedelta(minutes=121), + updated_at=datetime.utcnow() - timedelta(minutes=31), job_status=JOB_STATUS_IN_PROGRESS, ) job_2 = create_job( template=sample_template, notification_count=3, - created_at=datetime.utcnow() - timedelta(minutes=121), - processing_started=datetime.utcnow() - timedelta(minutes=119), + updated_at=datetime.utcnow() - timedelta(minutes=28), job_status=JOB_STATUS_IN_PROGRESS, ) with pytest.raises(expected_exception=JobIncompleteError) as e: @@ -285,19 +292,19 @@ def test_check_job_status_task_only_sends_old_tasks(mocker, sample_template): def test_check_job_status_task_sets_jobs_to_error(mocker, sample_template): mock_celery = mocker.patch("app.celery.tasks.notify_celery.send_task") + mocker.patch("app.celery.scheduled_tasks.update_in_progress_jobs") job = create_job( template=sample_template, notification_count=3, created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=121), - processing_started=datetime.utcnow() - timedelta(minutes=121), + updated_at=datetime.utcnow() - timedelta(minutes=31), job_status=JOB_STATUS_IN_PROGRESS, ) job_2 = create_job( template=sample_template, notification_count=3, created_at=datetime.utcnow() - timedelta(minutes=121), - processing_started=datetime.utcnow() - timedelta(minutes=119), + updated_at=datetime.utcnow() - timedelta(minutes=28), job_status=JOB_STATUS_IN_PROGRESS, ) with pytest.raises(expected_exception=JobIncompleteError) as e: @@ -356,8 +363,8 @@ def test_replay_created_notifications(notify_db_session, sample_service, mocker) save_notification(create_notification(template=email_template, created_at=datetime.utcnow(), status="created")) replay_created_notifications() - email_delivery_queue.assert_called_once_with([str(old_email.id)], queue="send-email-tasks") - sms_delivery_queue.assert_called_once_with([str(old_sms.id)], queue="send-sms-tasks") + email_delivery_queue.assert_called_once_with([str(old_email.id)], queue=QueueNames.SEND_EMAIL_MEDIUM) + sms_delivery_queue.assert_called_once_with([str(old_sms.id)], queue=QueueNames.SEND_SMS_MEDIUM) def test_check_job_status_task_does_not_raise_error(sample_template): @@ -494,3 +501,124 @@ def test_check_templated_letter_state_during_utc(mocker, sample_letter_template) subject="[test] Letters still in 'created' status", ticket_type="incident", ) + + 
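The `check_job_status` tests above switch from a two-hour `processing_started` window to a roughly 30-minute `updated_at` window (31 minutes counts as stale, 28 does not), with `update_in_progress_jobs` patched out. As a sketch only, the staleness filter those parametrisations imply might look like the following; the 30-minute threshold, status string, and function names are assumptions rather than code from the diff.

```python
# Illustrative staleness check consistent with the updated tests: an
# "in progress" job whose updated_at is older than ~30 minutes is treated as
# incomplete and surfaced via JobIncompleteError.
from datetime import datetime, timedelta


class JobIncompleteError(Exception):
    """Raised when in-progress jobs appear to have stalled."""


def find_stalled_jobs(jobs, stale_after=timedelta(minutes=30), now=None):
    """Return in-progress jobs not updated within `stale_after`."""
    now = now or datetime.utcnow()
    return [
        job
        for job in jobs
        if job.job_status == "in progress"
        and (job.updated_at or job.created_at) < now - stale_after
    ]


def check_job_status(jobs):
    """Raise so upstream retry/alerting can kick in, mirroring the tests'
    expectation that stalled job ids appear in the error message."""
    stalled = find_stalled_jobs(jobs)
    if stalled:
        raise JobIncompleteError(f"Job(s) {[str(job.id) for job in stalled]} have not completed.")
```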
+class TestHeartbeatQueues: + def test_beat_inbox_sms_normal(self, notify_api, mocker): + mocker.patch("app.celery.tasks.current_app.logger.info") + mocker.patch("app.sms_normal.poll", side_effect=[("rec123", ["1", "2", "3", "4"]), ("hello", [])]) + mocker.patch("app.celery.tasks.save_smss.apply_async") + + beat_inbox_sms_normal() + + tasks.save_smss.apply_async.assert_called_once_with( + (None, ["1", "2", "3", "4"], "rec123"), + queue="-normal-database-tasks", + ) + + def test_beat_inbox_sms_bulk(self, notify_api, mocker): + mocker.patch("app.celery.tasks.current_app.logger.info") + mocker.patch("app.sms_bulk.poll", side_effect=[("rec123", ["1", "2", "3", "4"]), ("hello", [])]) + mocker.patch("app.celery.tasks.save_smss.apply_async") + + beat_inbox_sms_bulk() + + tasks.save_smss.apply_async.assert_called_once_with( + (None, ["1", "2", "3", "4"], "rec123"), + queue="-bulk-database-tasks", + ) + + def test_beat_inbox_sms_priority(self, notify_api, mocker): + mocker.patch("app.celery.tasks.current_app.logger.info") + mocker.patch("app.sms_priority.poll", side_effect=[("rec123", ["1", "2", "3", "4"]), ("hello", [])]) + mocker.patch("app.celery.tasks.save_smss.apply_async") + + beat_inbox_sms_priority() + + tasks.save_smss.apply_async.assert_called_once_with( + (None, ["1", "2", "3", "4"], "rec123"), + queue="-priority-database-tasks.fifo", + ) + + def test_beat_inbox_email_normal(self, notify_api, mocker): + mocker.patch("app.celery.tasks.current_app.logger.info") + mocker.patch("app.email_normal.poll", side_effect=[("rec123", ["1", "2", "3", "4"]), ("hello", [])]) + mocker.patch("app.celery.tasks.save_emails.apply_async") + + beat_inbox_email_normal() + + tasks.save_emails.apply_async.assert_called_once_with( + (None, ["1", "2", "3", "4"], "rec123"), + queue="-normal-database-tasks", + ) + + def test_beat_inbox_email_bulk(self, notify_api, mocker): + mocker.patch("app.celery.tasks.current_app.logger.info") + mocker.patch("app.email_bulk.poll", side_effect=[("rec123", ["1", "2", "3", "4"]), ("hello", [])]) + mocker.patch("app.celery.tasks.save_emails.apply_async") + + beat_inbox_email_bulk() + + tasks.save_emails.apply_async.assert_called_once_with( + (None, ["1", "2", "3", "4"], "rec123"), + queue="-bulk-database-tasks", + ) + + def test_beat_inbox_email_priority(self, notify_api, mocker): + mocker.patch("app.celery.tasks.current_app.logger.info") + mocker.patch("app.email_priority.poll", side_effect=[("rec123", ["1", "2", "3", "4"]), ("hello", [])]) + mocker.patch("app.celery.tasks.save_emails.apply_async") + + beat_inbox_email_priority() + + tasks.save_emails.apply_async.assert_called_once_with( + (None, ["1", "2", "3", "4"], "rec123"), + queue="-priority-database-tasks.fifo", + ) + + +class TestRecoverExpiredNotification: + def test_recover_expired_notifications(self, mocker, notify_api): + sms_bulk = mocker.patch("app.sms_bulk.expire_inflights") + sms_normal = mocker.patch("app.sms_normal.expire_inflights") + sms_priority = mocker.patch("app.sms_priority.expire_inflights") + email_bulk = mocker.patch("app.email_bulk.expire_inflights") + email_normal = mocker.patch("app.email_normal.expire_inflights") + email_priority = mocker.patch("app.email_priority.expire_inflights") + + recover_expired_notifications() + + sms_bulk.assert_called_once() + sms_normal.assert_called_once() + sms_priority.assert_called_once() + email_bulk.assert_called_once() + email_normal.assert_called_once() + email_priority.assert_called_once() + + +@pytest.mark.parametrize( + "notification_count_in_job, 
notification_count_in_db, initial_status, expected_status", + [ + [3, 0, JOB_STATUS_IN_PROGRESS, JOB_STATUS_IN_PROGRESS], + [3, 1, JOB_STATUS_IN_PROGRESS, JOB_STATUS_IN_PROGRESS], + [3, 1, JOB_STATUS_ERROR, JOB_STATUS_ERROR], + [3, 3, JOB_STATUS_ERROR, JOB_STATUS_FINISHED], + [3, 3, JOB_STATUS_IN_PROGRESS, JOB_STATUS_FINISHED], + [3, 10, JOB_STATUS_IN_PROGRESS, JOB_STATUS_FINISHED], + ], +) +def test_mark_jobs_complete( + sample_template, notification_count_in_job, notification_count_in_db, initial_status, expected_status +): + job = create_job( + template=sample_template, + notification_count=notification_count_in_job, + created_at=datetime.utcnow() - timedelta(minutes=1), + processing_started=datetime.utcnow() - timedelta(minutes=1), + job_status=initial_status, + ) + for _ in range(notification_count_in_db): + save_notification(create_notification(template=sample_template, job=job)) + + mark_jobs_complete() + assert job.job_status == expected_status diff --git a/tests/app/celery/test_service_callback_tasks.py b/tests/app/celery/test_service_callback_tasks.py index 538cba4874..eda0e212d8 100644 --- a/tests/app/celery/test_service_callback_tasks.py +++ b/tests/app/celery/test_service_callback_tasks.py @@ -5,7 +5,7 @@ import requests_mock from freezegun import freeze_time -from app import DATETIME_FORMAT, encryption +from app import DATETIME_FORMAT, signer_complaint, signer_delivery_status from app.celery.service_callback_tasks import ( send_complaint_to_service, send_delivery_status_to_service, @@ -21,8 +21,7 @@ @pytest.mark.parametrize("notification_type", ["email", "letter", "sms"]) -def test_send_delivery_status_to_service_post_https_request_to_service_with_encrypted_data(notify_db_session, notification_type): - +def test_send_delivery_status_to_service_post_https_request_to_service_with_signed_data(notify_db_session, notification_type): callback_api, template = _set_up_test_data(notification_type, "delivery_status") datestr = datetime(2017, 6, 20) @@ -35,16 +34,17 @@ def test_send_delivery_status_to_service_post_https_request_to_service_with_encr status="sent", ) ) - encrypted_status_update = _set_up_data_for_status_update(callback_api, notification) + signed_status_update = _set_up_data_for_status_update(callback_api, notification) with requests_mock.Mocker() as request_mock: request_mock.post(callback_api.url, json={}, status_code=200) - send_delivery_status_to_service(notification.id, encrypted_status_update=encrypted_status_update) + send_delivery_status_to_service(notification.id, signed_status_update=signed_status_update) mock_data = { "id": str(notification.id), "reference": notification.client_reference, "to": notification.to, "status": notification.status, + "status_description": notification.formatted_status, "provider_response": notification.provider_response, "created_at": datestr.strftime(DATETIME_FORMAT), "completed_at": datestr.strftime(DATETIME_FORMAT), @@ -60,7 +60,7 @@ def test_send_delivery_status_to_service_post_https_request_to_service_with_encr assert request_mock.request_history[0].headers["Authorization"] == "Bearer {}".format(callback_api.bearer_token) -def test_send_complaint_to_service_posts_https_request_to_service_with_encrypted_data( +def test_send_complaint_to_service_posts_https_request_to_service_with_signed_data( notify_db_session, ): with freeze_time("2001-01-01T12:00:00"): @@ -90,8 +90,9 @@ def test_send_complaint_to_service_posts_https_request_to_service_with_encrypted @pytest.mark.parametrize("notification_type", ["email", "letter", "sms"]) -def 
test__send_data_to_service_callback_api_retries_if_request_returns_500_with_encrypted_data(
-    notify_db_session, mocker, notification_type
+@pytest.mark.parametrize("status_code", [429, 500, 503])
+def test__send_data_to_service_callback_api_retries_if_request_returns_error_code_with_signed_data(
+    notify_db_session, mocker, notification_type, status_code
 ):
     callback_api, template = _set_up_test_data(notification_type, "delivery_status")
     datestr = datetime(2017, 6, 20)
@@ -104,18 +105,18 @@ def test__send_data_to_service_callback_api_retries_if_request_returns_500_with_
             status="sent",
         )
     )
-    encrypted_data = _set_up_data_for_status_update(callback_api, notification)
+    signed_data = _set_up_data_for_status_update(callback_api, notification)
     mocked = mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.retry")
     with requests_mock.Mocker() as request_mock:
-        request_mock.post(callback_api.url, json={}, status_code=500)
-        send_delivery_status_to_service(notification.id, encrypted_status_update=encrypted_data)
+        request_mock.post(callback_api.url, json={}, status_code=status_code)
+        send_delivery_status_to_service(notification.id, signed_status_update=signed_data)
     assert mocked.call_count == 1
-    assert mocked.call_args[1]["queue"] == "retry-tasks"
+    assert mocked.call_args[1]["queue"] == "service-callbacks-retry"
 @pytest.mark.parametrize("notification_type", ["email", "letter", "sms"])
-def test__send_data_to_service_callback_api_does_not_retry_if_request_returns_404_with_encrypted_data(
+def test__send_data_to_service_callback_api_does_not_retry_if_request_returns_404_with_signed_data(
     notify_db_session, mocker, notification_type
 ):
     callback_api, template = _set_up_test_data(notification_type, "delivery_status")
@@ -129,11 +130,11 @@ def test__send_data_to_service_callback_api_does_not_retry_if_request_returns_40
             status="sent",
         )
     )
-    encrypted_data = _set_up_data_for_status_update(callback_api, notification)
+    signed_data = _set_up_data_for_status_update(callback_api, notification)
     mocked = mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.retry")
     with requests_mock.Mocker() as request_mock:
         request_mock.post(callback_api.url, json={}, status_code=404)
-        send_delivery_status_to_service(notification.id, encrypted_status_update=encrypted_data)
+        send_delivery_status_to_service(notification.id, signed_status_update=signed_data)
     assert mocked.call_count == 0
@@ -150,11 +151,11 @@ def test_send_delivery_status_to_service_succeeds_if_sent_at_is_none(notify_db_s
             status="technical-failure",
         )
     )
-    encrypted_data = _set_up_data_for_status_update(callback_api, notification)
+    signed_data = _set_up_data_for_status_update(callback_api, notification)
     mocked = mocker.patch("app.celery.service_callback_tasks.send_delivery_status_to_service.retry")
     with requests_mock.Mocker() as request_mock:
         request_mock.post(callback_api.url, json={}, status_code=404)
-        send_delivery_status_to_service(notification.id, encrypted_status_update=encrypted_data)
+        send_delivery_status_to_service(notification.id, signed_status_update=signed_data)
     assert mocked.call_count == 0
@@ -177,6 +178,7 @@ def _set_up_data_for_status_update(callback_api, notification):
         "notification_client_reference": notification.client_reference,
         "notification_to": notification.to,
         "notification_status": notification.status,
+        "notification_status_description": notification.formatted_status,
         "notification_provider_response": notification.provider_response,
         "notification_created_at": notification.created_at.strftime(DATETIME_FORMAT),
         "notification_updated_at": notification.updated_at.strftime(DATETIME_FORMAT) if notification.updated_at else None,
@@ -185,8 +187,8 @@ def _set_up_data_for_status_update(callback_api, notification):
         "service_callback_api_url": callback_api.url,
         "service_callback_api_bearer_token": callback_api.bearer_token,
     }
-    encrypted_status_update = encryption.encrypt(data)
-    return encrypted_status_update
+    signed_status_update = signer_delivery_status.sign(data)
+    return signed_status_update
 def _set_up_data_for_complaint(callback_api, complaint, notification):
@@ -199,5 +201,5 @@ def _set_up_data_for_complaint(callback_api, complaint, notification):
         "service_callback_api_url": callback_api.url,
         "service_callback_api_bearer_token": callback_api.bearer_token,
     }
-    obscured_status_update = encryption.encrypt(data)
+    obscured_status_update = signer_complaint.sign(data)
     return obscured_status_update
diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py
index 5648e1197f..aeeb6c8c76 100644
--- a/tests/app/celery/test_tasks.py
+++ b/tests/app/celery/test_tasks.py
@@ -1,7 +1,8 @@
 import json
 import uuid
 from datetime import datetime, timedelta
-from unittest.mock import Mock, call
+from unittest import mock
+from unittest.mock import MagicMock, Mock, call
 import pytest
 import requests_mock
@@ -10,49 +11,57 @@
 from notifications_utils.recipients import RecipientCSV
 from notifications_utils.template import SMSMessageTemplate, WithSubjectTemplate
 from requests import RequestException
-from sqlalchemy.exc import SQLAlchemyError
-
-from app import DATETIME_FORMAT, encryption, redis_store
+from sqlalchemy.exc import IntegrityError, SQLAlchemyError
+
+from app import (
+    DATETIME_FORMAT,
+    bounce_rate_client,
+    redis_store,
+    signer_notification,
+    signer_personalisation,
+)
 from app.celery import provider_tasks, tasks
 from app.celery.tasks import (
+    acknowledge_receipt,
+    choose_database_queue,
     get_template_class,
+    handle_batch_error_and_forward,
     process_incomplete_job,
     process_incomplete_jobs,
     process_job,
-    process_returned_letters_list,
-    process_row,
+    process_rows,
     s3,
-    save_email,
-    save_letter,
-    save_sms,
+    save_emails,
+    save_smss,
+    seed_bounce_rate_in_redis,
     send_inbound_sms_to_service,
     send_notify_no_reply,
+    update_in_progress_jobs,
 )
 from app.config import QueueNames
 from app.dao import jobs_dao, service_email_reply_to_dao, service_sms_sender_dao
+from app.dao.services_dao import dao_fetch_service_by_id
 from app.models import (
+    BULK,
     EMAIL_TYPE,
     JOB_STATUS_ERROR,
-    JOB_STATUS_FINISHED,
-    JOB_STATUS_IN_PROGRESS,
     KEY_TYPE_NORMAL,
     LETTER_TYPE,
+    NORMAL,
+    PRIORITY,
     SMS_TYPE,
-    Job,
     Notification,
-    NotificationHistory,
     ServiceEmailReplyTo,
     ServiceSmsSender,
 )
 from app.schemas import service_schema, template_schema
 from celery.exceptions import Retry
 from tests.app import load_example_csv
+from tests.app.conftest import create_sample_service, create_sample_template
 from tests.app.db import (
     create_inbound_sms,
     create_job,
-    create_letter_contact,
     create_notification,
-    create_notification_history,
     create_reply_to_email,
     create_service,
     create_service_inbound_api,
@@ -69,7 +78,7 @@ def __eq__(self, other):
         return self in other
-def _notification_json(template, to, personalisation=None, job_id=None, row_number=0, queue=None):
+def _notification_json(template, to, personalisation=None, job_id=None, row_number=0, queue=None, reply_to_text=None):
     return {
         "template": str(template.id),
         "template_version": template.version,
@@ -79,1431 +88,1775 @@ def _notification_json(template, to, personalisation=None, job_id=None, row_numb
         "job": job_id and str(job_id),
         "row_number": row_number,
         "queue": queue,
+        "reply_to_text": reply_to_text,
     }
 def test_should_have_decorated_tasks_functions():
     assert process_job.__wrapped__.__name__ == "process_job"
-    assert save_sms.__wrapped__.__name__ == "save_sms"
-    assert save_email.__wrapped__.__name__ == "save_email"
-    assert save_letter.__wrapped__.__name__ == "save_letter"
-
-
-@pytest.fixture
-def email_job_with_placeholders(notify_db, notify_db_session, sample_email_template_with_placeholders):
-    return create_job(template=sample_email_template_with_placeholders)
-
-
-# -------------- process_job tests -------------- #
-
-
-def test_should_process_sms_job(sample_job, mocker):
-    mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=load_example_csv("sms"))
-    mocker.patch("app.celery.tasks.save_sms.apply_async")
-    mocker.patch("app.encryption.encrypt", return_value="something_encrypted")
-    mocker.patch("app.celery.tasks.create_uuid", return_value="uuid")
-
-    redis_mock = mocker.patch("app.celery.tasks.statsd_client.timing_with_dates")
-
-    process_job(sample_job.id)
-    s3.get_job_from_s3.assert_called_once_with(str(sample_job.service.id), str(sample_job.id))
-    assert encryption.encrypt.call_args[0][0]["to"] == "+441234123123"
-    assert encryption.encrypt.call_args[0][0]["template"] == str(sample_job.template.id)
-    assert encryption.encrypt.call_args[0][0]["template_version"] == sample_job.template.version
-    assert encryption.encrypt.call_args[0][0]["personalisation"] == {"phonenumber": "+441234123123"}
-    assert encryption.encrypt.call_args[0][0]["row_number"] == 0
-    tasks.save_sms.apply_async.assert_called_once_with(
-        (str(sample_job.service_id), "uuid", "something_encrypted"),
-        {},
-        queue="database-tasks",
-    )
-    job = jobs_dao.dao_get_job_by_id(sample_job.id)
-    assert job.job_status == "finished"
-    assert job.processing_started is not None
-    assert job.created_at is not None
-    redis_mock.assert_called_once_with("job.processing-start-delay", job.processing_started, job.created_at)
-
-
-def test_should_process_sms_job_with_sender_id(sample_template, mocker, fake_uuid):
-    job = create_job(template=sample_template, sender_id=fake_uuid)
-    mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=load_example_csv("sms"))
-    mocker.patch("app.celery.tasks.save_sms.apply_async")
-    mocker.patch("app.encryption.encrypt", return_value="something_encrypted")
-    mocker.patch("app.celery.tasks.create_uuid", return_value="uuid")
-
-    process_job(job.id)
-
-    tasks.save_sms.apply_async.assert_called_once_with(
-        (str(job.service_id), "uuid", "something_encrypted"),
-        {"sender_id": fake_uuid},
-        queue="database-tasks",
-    )
-
-
-@pytest.mark.parametrize(
-    "csv_threshold, expected_queue",
-    [
-        (0, "bulk-tasks"),
-        (1_000, None),
-    ],
-)
-def test_should_redirect_job_to_queue_depending_on_csv_threshold(
-    notify_api, sample_job, mocker, fake_uuid, csv_threshold, expected_queue
-):
-    mock_save_email = mocker.patch("app.celery.tasks.save_email")
-    mocker.patch("app.service.utils.safelisted_members", return_value=None)
-
-    template = Mock(id=1, template_type=EMAIL_TYPE)
-    job = Mock(id=1, template_version="temp_vers", notification_count=1)
-    service = Mock(id=1, research_mode=False)
-
-    row = next(
-        RecipientCSV(
-            load_example_csv("email"),
-            template_type=EMAIL_TYPE,
-        ).get_rows()
-    )
+    assert save_smss.__wrapped__.__name__ == "save_smss"
+    assert save_emails.__wrapped__.__name__ ==
"save_emails" - with set_config_values(notify_api, {"CSV_BULK_REDIRECT_THRESHOLD": csv_threshold}): - process_row(row, template, job, service) - tasks.save_email.apply_async.assert_called_once() - args = mock_save_email.method_calls[0].args - encrypted_notification = args[0][2] - notification = encryption.decrypt(encrypted_notification) - assert expected_queue == notification.get("queue") +class TestAcknowledgeReceipt: + def test_acknowledge_happy_path(self, mocker): + receipt = uuid.uuid4() + acknowledge_sms_normal_mock = mocker.patch("app.sms_normal.acknowledge", return_value=True) + acknowledge_sms_priority_mock = mocker.patch("app.sms_bulk.acknowledge", return_value=False) + acknowledge_receipt(SMS_TYPE, NORMAL, receipt) + assert acknowledge_sms_normal_mock.called_once_with(receipt) + assert acknowledge_sms_priority_mock.not_called() + def test_acknowledge_wrong_queue(self, mocker, notify_api): + receipt = uuid.uuid4() + acknowledge_sms_bulk_mock = mocker.patch("app.sms_bulk.acknowledge", return_value=True) + acknowledge_receipt(EMAIL_TYPE, NORMAL, receipt) + assert acknowledge_sms_bulk_mock.called_once_with(receipt) -@freeze_time("2016-01-01 11:09:00.061258") -def test_should_not_process_sms_job_if_would_exceed_send_limits(notify_db_session, mocker): - service = create_service(message_limit=9) - template = create_template(service=service) - job = create_job(template=template, notification_count=10, original_file_name="multiple_sms.csv") - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_sms"), - ) - mocker.patch("app.celery.tasks.process_row") - - process_job(job.id) - - job = jobs_dao.dao_get_job_by_id(job.id) - assert job.job_status == "sending limits exceeded" - assert s3.get_job_from_s3.called is False - assert tasks.process_row.called is False - - -def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(notify_db_session, mocker): - service = create_service(message_limit=1) - template = create_template(service=service) - job = create_job(template=template) - - save_notification(create_notification(template=template, job=job)) - - mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=load_example_csv("sms")) - mocker.patch("app.celery.tasks.process_row") - - process_job(job.id) - - job = jobs_dao.dao_get_job_by_id(job.id) - assert job.job_status == "sending limits exceeded" - assert s3.get_job_from_s3.called is False - assert tasks.process_row.called is False - - -@pytest.mark.parametrize("template_type", ["sms", "email"]) -def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(notify_db_session, template_type, mocker): - service = create_service(message_limit=1) - template = create_template(service=service, template_type=template_type) - job = create_job(template=template) + def test_acknowledge_no_queue(self): + with pytest.raises(ValueError): + acknowledge_receipt(None, None, uuid.uuid4()) - save_notification(create_notification(template=template, job=job)) - mocker.patch("app.celery.tasks.s3.get_job_from_s3") - mocker.patch("app.celery.tasks.process_row") - - process_job(job.id) - - job = jobs_dao.dao_get_job_by_id(job.id) - assert job.job_status == "sending limits exceeded" - assert s3.get_job_from_s3.called is False - assert tasks.process_row.called is False +@pytest.fixture +def email_job_with_placeholders(notify_db, notify_db_session, sample_email_template_with_placeholders): + return create_job(template=sample_email_template_with_placeholders) -def 
test_should_not_process_job_if_already_pending(sample_template, mocker): - job = create_job(template=sample_template, job_status="scheduled") +class TestChooseDatabaseQueue: + @pytest.mark.parametrize( + "research_mode,template_priority,notification_count,expected_queue", + [ + (True, PRIORITY, 20, QueueNames.RESEARCH_MODE), + (True, NORMAL, 20, QueueNames.RESEARCH_MODE), + (True, BULK, 201, QueueNames.RESEARCH_MODE), + (False, PRIORITY, 1, QueueNames.PRIORITY_DATABASE), + (False, PRIORITY, 19, QueueNames.PRIORITY_DATABASE), + (False, PRIORITY, 20, QueueNames.PRIORITY_DATABASE), + (False, PRIORITY, 199, QueueNames.PRIORITY_DATABASE), + (False, PRIORITY, 200, QueueNames.BULK_DATABASE), + (False, NORMAL, 199, QueueNames.NORMAL_DATABASE), + (False, NORMAL, 200, QueueNames.BULK_DATABASE), + (False, NORMAL, 201, QueueNames.BULK_DATABASE), + (False, BULK, 201, QueueNames.BULK_DATABASE), + ], + ) + def test_choose_database_queue_FF_PRIORITY_LANES_true( + self, + mocker, + notify_db, + notify_db_session, + notify_api, + research_mode, + template_priority, + notification_count, + expected_queue, + ): + service = create_sample_service(notify_db, notify_db_session, research_mode=research_mode) + template = create_sample_template(notify_db, notify_db_session, process_type=template_priority) + + actual_queue = choose_database_queue(template.process_type, service.research_mode, notification_count) + assert expected_queue == actual_queue + + +@pytest.mark.usefixtures("notify_db_session") +class TestBatchSaving: + def test_save_emails(self, notify_db_session, mocker): + service = create_service(research_mode=True) + + template = create_template(service=service, template_type="email") + + notification1 = _notification_json(template, to="test1@test.com") + notification2 = _notification_json(template, to="test2@test.com") + notification3 = _notification_json(template, to="test3@test.com") + + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + + save_emails( + str(template.service_id), + [ + signer_notification.sign(notification1), + signer_notification.sign(notification2), + signer_notification.sign(notification3), + ], + None, + ) - mocker.patch("app.celery.tasks.s3.get_job_from_s3") - mocker.patch("app.celery.tasks.process_row") + persisted_notification = Notification.query.all() + assert persisted_notification[0].to == "test1@test.com" + assert persisted_notification[1].to == "test2@test.com" + assert persisted_notification[2].to == "test3@test.com" + assert persisted_notification[0].template_id == template.id + assert persisted_notification[1].template_version == template.version + assert persisted_notification[0].status == "created" + assert persisted_notification[0].notification_type == "email" + + def test_should_save_smss(self, sample_template_with_placeholders, mocker): + notification1 = _notification_json( + sample_template_with_placeholders, + to="+1 650 253 2221", + personalisation={"name": "Jo"}, + ) + notification1_id = uuid.uuid4() + notification1["id"] = str(notification1_id) - process_job(job.id) + notification2 = _notification_json( + sample_template_with_placeholders, to="+1 650 253 2222", personalisation={"name": "Test2"} + ) - assert s3.get_job_from_s3.called is False - assert tasks.process_row.called is False + notification3 = _notification_json( + sample_template_with_placeholders, to="+1 650 253 2223", personalisation={"name": 
"Test3"} + ) + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + acknowledge_mock = mocker.patch("app.sms_normal.acknowledge") + + receipt = uuid.uuid4() + save_smss( + str(sample_template_with_placeholders.service.id), + [ + signer_notification.sign(notification1), + signer_notification.sign(notification2), + signer_notification.sign(notification3), + ], + receipt, + ) -def test_should_process_email_job_if_exactly_on_send_limits(notify_db_session, mocker): - service = create_service(message_limit=10) - template = create_template(service=service, template_type="email") - job = create_job(template=template, notification_count=10) + persisted_notification = Notification.query.all() + assert persisted_notification[0].id == notification1_id + assert persisted_notification[0].to == "+1 650 253 2221" + assert persisted_notification[1].to == "+1 650 253 2222" + assert persisted_notification[2].to == "+1 650 253 2223" + assert persisted_notification[0].template_id == sample_template_with_placeholders.id + assert persisted_notification[1].template_version == sample_template_with_placeholders.version + assert persisted_notification[0].status == "created" + assert persisted_notification[0].personalisation == {"name": "Jo"} + assert persisted_notification[0]._personalisation == signer_personalisation.sign({"name": "Jo"}) + assert persisted_notification[0].notification_type == SMS_TYPE + + acknowledge_mock.assert_called_once_with(receipt) + + def test_should_save_smss_acknowledge_queue(self, sample_template_with_placeholders, notify_api, mocker): + notification1 = _notification_json( + sample_template_with_placeholders, + to="+1 650 253 2221", + personalisation={"name": "Jo"}, + ) + notification1_id = uuid.uuid4() + notification1["id"] = str(notification1_id) - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_email"), - ) - mocker.patch("app.celery.tasks.save_email.apply_async") - mocker.patch("app.encryption.encrypt", return_value="something_encrypted") - mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") - - process_job(job.id) - - s3.get_job_from_s3.assert_called_once_with(str(job.service.id), str(job.id)) - job = jobs_dao.dao_get_job_by_id(job.id) - assert job.job_status == "finished" - tasks.save_email.apply_async.assert_called_with( - ( - str(job.service_id), - "uuid", - "something_encrypted", - ), - {}, - queue="database-tasks", - ) + notification2 = _notification_json( + sample_template_with_placeholders, to="+1 650 253 2222", personalisation={"name": "Test2"} + ) + notification3 = _notification_json( + sample_template_with_placeholders, to="+1 650 253 2223", personalisation={"name": "Test3"} + ) -def test_should_not_create_save_task_for_empty_file(sample_job, mocker): - mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=load_example_csv("empty")) - mocker.patch("app.celery.tasks.save_sms.apply_async") + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + acknowldege_mock = mocker.patch("app.sms_normal.acknowledge") + + receipt = uuid.uuid4() + save_smss( + str(sample_template_with_placeholders.service.id), + [ + signer_notification.sign(notification1), + signer_notification.sign(notification2), + signer_notification.sign(notification3), + ], + receipt, + ) - process_job(sample_job.id) + persisted_notification = Notification.query.all() + assert persisted_notification[0].id == notification1_id + assert persisted_notification[0].to == "+1 650 253 2221" + assert 
persisted_notification[1].to == "+1 650 253 2222" + assert persisted_notification[2].to == "+1 650 253 2223" + assert persisted_notification[0].template_id == sample_template_with_placeholders.id + assert persisted_notification[1].template_version == sample_template_with_placeholders.version + assert persisted_notification[0].status == "created" + assert persisted_notification[0].personalisation == {"name": "Jo"} + assert persisted_notification[0]._personalisation == signer_personalisation.sign({"name": "Jo"}) + assert persisted_notification[0].notification_type == SMS_TYPE + + acknowldege_mock.assert_called_once_with(receipt) + + def test_should_save_emails(self, sample_email_template_with_placeholders, mocker): + notification1 = _notification_json( + sample_email_template_with_placeholders, + to="test1@gmail.com", + personalisation={"name": "Jo"}, + ) + notification1_id = uuid.uuid4() + notification1["id"] = str(notification1_id) - s3.get_job_from_s3.assert_called_once_with(str(sample_job.service.id), str(sample_job.id)) - job = jobs_dao.dao_get_job_by_id(sample_job.id) - assert job.job_status == "finished" - assert tasks.save_sms.apply_async.called is False + notification2 = _notification_json( + sample_email_template_with_placeholders, to="test2@gmail.com", personalisation={"name": "Test2"} + ) + notification3 = _notification_json( + sample_email_template_with_placeholders, to="test3@gmail.com", personalisation={"name": "Test3"} + ) -def test_should_process_email_job(email_job_with_placeholders, mocker): - email_csv = """email_address,name - test@test.com,foo - """ - mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=email_csv) - mocker.patch("app.celery.tasks.save_email.apply_async") - mocker.patch("app.encryption.encrypt", return_value="something_encrypted") - mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") - redis_mock = mocker.patch("app.celery.tasks.statsd_client.timing_with_dates") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + acknowledge_mock = mocker.patch("app.email_normal.acknowledge") - process_job(email_job_with_placeholders.id) + receipt = uuid.uuid4() - s3.get_job_from_s3.assert_called_once_with(str(email_job_with_placeholders.service.id), str(email_job_with_placeholders.id)) - assert encryption.encrypt.call_args[0][0]["to"] == "test@test.com" - assert encryption.encrypt.call_args[0][0]["template"] == str(email_job_with_placeholders.template.id) - assert encryption.encrypt.call_args[0][0]["template_version"] == email_job_with_placeholders.template.version - assert encryption.encrypt.call_args[0][0]["personalisation"] == { - "emailaddress": "test@test.com", - "name": "foo", - } - tasks.save_email.apply_async.assert_called_once_with( - ( - str(email_job_with_placeholders.service_id), - "uuid", - "something_encrypted", - ), - {}, - queue="database-tasks", - ) - job = jobs_dao.dao_get_job_by_id(email_job_with_placeholders.id) - assert job.job_status == "finished" - assert job.processing_started is not None - assert job.created_at is not None - redis_mock.assert_called_once_with("job.processing-start-delay", job.processing_started, job.created_at) - - -def test_should_process_email_job_with_sender_id(sample_email_template, mocker, fake_uuid): - email_csv = """email_address,name - test@test.com,foo - """ - job = create_job(template=sample_email_template, sender_id=fake_uuid) - mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=email_csv) - mocker.patch("app.celery.tasks.save_email.apply_async") - 
mocker.patch("app.encryption.encrypt", return_value="something_encrypted") - mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") - - process_job(job.id) - - tasks.save_email.apply_async.assert_called_once_with( - (str(job.service_id), "uuid", "something_encrypted"), - {"sender_id": fake_uuid}, - queue="database-tasks", - ) + save_emails( + str(sample_email_template_with_placeholders.service.id), + [ + signer_notification.sign(notification1), + signer_notification.sign(notification2), + signer_notification.sign(notification3), + ], + receipt, + ) + persisted_notification = Notification.query.all() + assert persisted_notification[0].id == notification1_id + assert persisted_notification[0].to == "test1@gmail.com" + assert persisted_notification[1].to == "test2@gmail.com" + assert persisted_notification[2].to == "test3@gmail.com" + assert persisted_notification[0].template_id == sample_email_template_with_placeholders.id + assert persisted_notification[1].template_version == sample_email_template_with_placeholders.version + assert persisted_notification[0].status == "created" + assert persisted_notification[0].personalisation == {"name": "Jo"} + assert persisted_notification[0]._personalisation == signer_personalisation.sign({"name": "Jo"}) + assert persisted_notification[0].notification_type == EMAIL_TYPE + + acknowledge_mock.assert_called_once_with(receipt) + + def test_should_not_forward_sms_on_duplicate(self, sample_template_with_placeholders, mocker): + notification1 = _notification_json( + sample_template_with_placeholders, + to="+1 650 253 2221", + personalisation={"name": "Jo"}, + ) + notification1["id"] = str(uuid.uuid4()) + notification1["service_id"] = str(sample_template_with_placeholders.service.id) -@freeze_time("2016-01-01 11:09:00.061258") -def test_should_process_letter_job(sample_letter_job, mocker): - csv = """address_line_1,address_line_2,address_line_3,address_line_4,postcode,name - A1,A2,A3,A4,A_POST,Alice - """ - s3_mock = mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=csv) - process_row_mock = mocker.patch("app.celery.tasks.process_row") - mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") - - process_job(sample_letter_job.id) - - s3_mock.assert_called_once_with(str(sample_letter_job.service.id), str(sample_letter_job.id)) - - row_call = process_row_mock.mock_calls[0][1] - assert row_call[0].index == 0 - assert row_call[0].recipient == ["A1", "A2", "A3", "A4", None, None, "A_POST"] - assert row_call[0].personalisation == { - "addressline1": "A1", - "addressline2": "A2", - "addressline3": "A3", - "addressline4": "A4", - "postcode": "A_POST", - } - assert row_call[2] == sample_letter_job - assert row_call[3] == sample_letter_job.service + mock_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + mock_persist_notifications = mocker.patch( + "app.celery.tasks.persist_notifications", side_effect=IntegrityError(None, None, None) + ) + mock_get_notification = mocker.patch("app.celery.tasks.get_notification_by_id", return_value=notification1) + mock_save_sms = mocker.patch("app.celery.tasks.save_smss.apply_async") + mock_acknowldege = mocker.patch("app.sms_normal.acknowledge") - assert process_row_mock.call_count == 1 + receipt = uuid.uuid4() + notifications = [signer_notification.sign(notification1)] - assert sample_letter_job.job_status == "finished" + save_smss( + None, + notifications, + receipt, + ) + mock_deliver_sms.assert_not_called() + mock_persist_notifications.assert_called_once() + 
mock_get_notification.assert_called_once_with(notification1["id"]) + mock_save_sms.assert_not_called() + mock_acknowldege.assert_called_once_with(receipt) -def test_should_process_all_sms_job(sample_job_with_placeholdered_template, mocker): - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_sms"), - ) - mocker.patch("app.celery.tasks.save_sms.apply_async") - mocker.patch("app.encryption.encrypt", return_value="something_encrypted") - mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") + def test_should_not_forward_email_on_duplicate(self, sample_email_template_with_placeholders, mocker): + notification1 = _notification_json( + sample_email_template_with_placeholders, + to="test1@gmail.com", + personalisation={"name": "Jo"}, + ) + notification1["id"] = str(uuid.uuid4()) + notification1["service_id"] = str(sample_email_template_with_placeholders.service.id) - process_job(sample_job_with_placeholdered_template.id) + mock_deliver_email = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + mock_persist_notifications = mocker.patch( + "app.celery.tasks.persist_notifications", side_effect=IntegrityError(None, None, None) + ) + mock_get_notification = mocker.patch("app.celery.tasks.get_notification_by_id", return_value=notification1) + mock_save_email = mocker.patch("app.celery.tasks.save_emails.apply_async") + mock_acknowldege = mocker.patch("app.email_normal.acknowledge") - s3.get_job_from_s3.assert_called_once_with( - str(sample_job_with_placeholdered_template.service.id), - str(sample_job_with_placeholdered_template.id), - ) - assert encryption.encrypt.call_args[0][0]["to"] == "+441234123120" - assert encryption.encrypt.call_args[0][0]["template"] == str(sample_job_with_placeholdered_template.template.id) - assert ( - encryption.encrypt.call_args[0][0]["template_version"] == sample_job_with_placeholdered_template.template.version - ) # noqa - assert encryption.encrypt.call_args[0][0]["personalisation"] == { - "phonenumber": "+441234123120", - "name": "chris", - } - assert tasks.save_sms.apply_async.call_count == 10 - job = jobs_dao.dao_get_job_by_id(sample_job_with_placeholdered_template.id) - assert job.job_status == "finished" + receipt = uuid.uuid4() + notifications = [signer_notification.sign(notification1)] + save_emails( + None, + notifications, + receipt, + ) -# -------------- process_row tests -------------- # + mock_deliver_email.assert_not_called() + mock_persist_notifications.assert_called_once() + mock_get_notification.assert_called_once_with(notification1["id"]) + mock_save_email.assert_not_called() + mock_acknowldege.assert_called_once_with(receipt) + + def test_should_process_smss_job_metric_check(self, mocker): + pbsbc_mock = mocker.patch("app.celery.tasks.put_batch_saving_bulk_created") + service = create_service(message_limit=20) + template = create_template(service=service) + job = create_job(template=template, notification_count=10, original_file_name="multiple_sms.csv") + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + mocker.patch("app.celery.tasks.save_smss.apply_async") + mocker.patch("app.signer_notification.sign", return_value="something_encrypted") + redis_mock = mocker.patch("app.celery.tasks.statsd_client.timing_with_dates") + process_job(job.id) -@pytest.mark.parametrize( - "template_type, research_mode, expected_function, expected_queue, api_key_id, sender_id", - [ - (SMS_TYPE, False, "save_sms", "database-tasks", None, 
None), - (SMS_TYPE, True, "save_sms", "research-mode-tasks", uuid.uuid4(), uuid.uuid4()), - (EMAIL_TYPE, False, "save_email", "database-tasks", uuid.uuid4(), uuid.uuid4()), - (EMAIL_TYPE, True, "save_email", "research-mode-tasks", None, None), - (LETTER_TYPE, False, "save_letter", "database-tasks", None, None), - (LETTER_TYPE, True, "save_letter", "research-mode-tasks", uuid.uuid4(), uuid.uuid4()), - ], -) -def test_process_row_sends_save_task( - notify_api, template_type, research_mode, expected_function, expected_queue, api_key_id, sender_id, mocker -): - service_allowed_to_send_to_mock = mocker.patch("app.service.utils.safelisted_members", return_value=None) - mocker.patch("app.celery.tasks.create_uuid", return_value="noti_uuid") - task_mock = mocker.patch("app.celery.tasks.{}.apply_async".format(expected_function)) - encrypt_mock = mocker.patch("app.celery.tasks.encryption.encrypt") - template = Mock(id="template_id", template_type=template_type) - job = Mock(id="job_id", template_version="temp_vers", notification_count=1, api_key_id=api_key_id, sender_id=sender_id) - service = Mock(id="service_id", research_mode=research_mode) - - process_row( - Row( - {"foo": "bar", "to": "recip"}, - index="row_num", - error_fn=lambda k, v: None, - recipient_column_headers=["to"], - placeholders={"foo"}, - template=template, - ), - template, - job, - service, - ) + s3.get_job_from_s3.assert_called_once_with(str(job.service.id), str(job.id)) - assert service_allowed_to_send_to_mock.called - encrypt_mock.assert_called_once_with( - { - "api_key": None if api_key_id is None else str(api_key_id), - "template": "template_id", - "template_version": "temp_vers", - "job": "job_id", - "to": "recip", - "row_number": "row_num", - "personalisation": {"foo": "bar"}, - "queue": None, + assert signer_notification.sign.call_args[0][0]["to"] == "+441234123120" + assert signer_notification.sign.call_args[0][0]["template"] == str(template.id) + assert signer_notification.sign.call_args[0][0]["template_version"] == template.version + assert signer_notification.sign.call_args[0][0]["personalisation"] == { + "phonenumber": "+441234123120", } - ) - task_mock.assert_called_once_with( - ( - "service_id", - "noti_uuid", - # encrypted data - encrypt_mock.return_value, - ), - {"sender_id": str(sender_id)} if sender_id else {}, - queue=expected_queue, - ) - - -def test_should_not_save_sms_if_restricted_service_and_invalid_number(notify_db_session, mocker): - user = create_user(mobile_number="6502532222") - service = create_service(user=user, restricted=True) - template = create_template(service=service) - job = create_job(template) - notification = _notification_json(template, to="07700 900849") - - save_sms_mock = mocker.patch("app.celery.tasks.save_sms") - - process_row( - Row( - {"foo": "bar", "to": notification["to"]}, - index="row_num", - error_fn=lambda k, v: None, - recipient_column_headers=["to"], - placeholders={"foo"}, - template=SMSMessageTemplate(template.__dict__), - ), - template, - job, - service, - ) + tasks.save_smss.apply_async.assert_called_once_with( + ( + str(job.service_id), + [ + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + ], + None, + ), + queue="-normal-database-tasks", + ) + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == "in progress" + assert job.processing_started is not None + 
assert job.created_at is not None + redis_mock.assert_called_once_with("job.processing-start-delay", job.processing_started, job.created_at) + assert pbsbc_mock.assert_called_with(mock.ANY, 1, notification_type="sms", priority="normal") is None + + def test_process_smss_job_metric(self, sample_template_with_placeholders, mocker): + pbsbp_mock = mocker.patch("app.celery.tasks.put_batch_saving_bulk_processed") + notification1 = _notification_json( + sample_template_with_placeholders, + to="+1 650 253 2221", + personalisation={"name": "Jo"}, + ) + notification1_id = uuid.uuid4() + notification1["id"] = str(notification1_id) - assert not save_sms_mock.called - - -def test_should_not_save_sms_if_team_key_and_recipient_not_in_team(notify_db_session, mocker): - user = create_user(mobile_number="6502532222") - service = create_service(user=user, restricted=True) - template = create_template(service=service) - job = create_job(template) - notification = _notification_json(template, to="07700 900849") - - team_members = [user.mobile_number for user in service.users] - assert "07890 300000" not in team_members - - save_sms_mock = mocker.patch("app.celery.tasks.save_sms.apply_async") - - process_row( - Row( - {"foo": "bar", "to": notification["to"]}, - index="row_num", - error_fn=lambda k, v: None, - recipient_column_headers=["to"], - placeholders={"foo"}, - template=SMSMessageTemplate(template.__dict__), - ), - template, - job, - service, - ) + notification2 = _notification_json( + sample_template_with_placeholders, to="+1 650 253 2222", personalisation={"name": "Test2"} + ) - assert not save_sms_mock.called - - -def test_should_not_save_email_if_restricted_service_and_invalid_email_address(notify_db_session, mocker): - user = create_user() - service = create_service(user=user, restricted=True) - template = create_template(service=service, template_type="email", subject="Hello") - job = create_job(template) - notification = _notification_json(template, to="test@example.com") - - save_email_mock = mocker.patch("app.celery.tasks.save_email") - - process_row( - Row( - {"foo": "bar", "to": notification["to"]}, - index="row_num", - error_fn=lambda k, v: None, - recipient_column_headers=["to"], - placeholders={"foo"}, - template=WithSubjectTemplate(template.__dict__), - ), - template, - job, - service, - ) + notification3 = _notification_json( + sample_template_with_placeholders, to="+1 650 253 2223", personalisation={"name": "Test3"} + ) - assert not save_email_mock.called + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + save_smss( + str(sample_template_with_placeholders.service.id), + [ + signer_notification.sign(notification1), + signer_notification.sign(notification2), + signer_notification.sign(notification3), + ], + None, + ) -# -------- save_sms and save_email tests -------- # + persisted_notification = Notification.query.all() + assert persisted_notification[0].id == notification1_id + assert persisted_notification[0].to == "+1 650 253 2221" + assert persisted_notification[1].to == "+1 650 253 2222" + assert persisted_notification[2].to == "+1 650 253 2223" + assert persisted_notification[0].template_id == sample_template_with_placeholders.id + assert persisted_notification[1].template_version == sample_template_with_placeholders.version + assert persisted_notification[0].status == "created" + assert persisted_notification[0].personalisation == {"name": "Jo"} + assert persisted_notification[0]._personalisation == signer_personalisation.sign({"name": "Jo"}) + assert 
persisted_notification[0].notification_type == SMS_TYPE + assert pbsbp_mock.assert_called_with(mock.ANY, 1, notification_type="sms", priority="normal") is None + + +class TestUpdateJob: + def test_update_job(self, sample_template, sample_job, mocker): + latest = save_notification(create_notification(job=sample_job, updated_at=datetime.utcnow())) + save_notification(create_notification(job=sample_job)) + mocker.patch("app.celery.tasks.dao_get_in_progress_jobs", return_value=[sample_job]) + mocker.patch("app.celery.tasks.get_latest_sent_notification_for_job", return_value=latest) + + update_in_progress_jobs() + updated_job = jobs_dao.dao_get_job_by_id(sample_job.id) + assert updated_job.updated_at == latest.updated_at + + def test_update_job_should_not_update_if_no_sent_notifications(self, sample_job, mocker): + mocker.patch("app.celery.tasks.dao_get_in_progress_jobs", return_value=[sample_job]) + mocker.patch("app.celery.tasks.get_latest_sent_notification_for_job", return_value=None) + mocked_update_job = mocker.patch("app.celery.tasks.dao_update_job") + + update_in_progress_jobs() + mocked_update_job.assert_not_called() + + +class TestProcessJob: + def test_should_process_sms_job_FF_PRIORITY_LANES_true(self, sample_job, mocker): + mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=load_example_csv("sms")) + mocker.patch("app.celery.tasks.save_smss.apply_async") + mocker.patch("app.signer_notification.sign", return_value="something_encrypted") + mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") + + redis_mock = mocker.patch("app.celery.tasks.statsd_client.timing_with_dates") + + process_job(sample_job.id) + s3.get_job_from_s3.assert_called_once_with(str(sample_job.service.id), str(sample_job.id)) + assert signer_notification.sign.call_args[0][0]["to"] == "+441234123123" + assert signer_notification.sign.call_args[0][0]["template"] == str(sample_job.template.id) + assert signer_notification.sign.call_args[0][0]["template_version"] == sample_job.template.version + assert signer_notification.sign.call_args[0][0]["personalisation"] == {"phonenumber": "+441234123123"} + assert signer_notification.sign.call_args[0][0]["row_number"] == 0 + tasks.save_smss.apply_async.assert_called_once_with( + (str(sample_job.service_id), ["something_encrypted"], None), queue=QueueNames.NORMAL_DATABASE + ) + job = jobs_dao.dao_get_job_by_id(sample_job.id) + assert job.job_status == "in progress" + assert job.processing_started is not None + assert job.created_at is not None + redis_mock.assert_called_once_with("job.processing-start-delay", job.processing_started, job.created_at) + + def test_should_process_sms_job_with_sender_id(self, sample_template, mocker, fake_uuid): + job = create_job(template=sample_template, sender_id=fake_uuid) + mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=load_example_csv("sms")) + mocker.patch("app.celery.tasks.save_smss.apply_async") + mocker.patch("app.signer_notification.sign", return_value="something_encrypted") + mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") + + process_job(job.id) + + tasks.save_smss.apply_async.assert_called_once_with( + (str(job.service_id), ["something_encrypted"], None), + queue="-normal-database-tasks", + ) + @freeze_time("2016-01-01 11:09:00.061258") + def test_should_not_process_sms_job_if_would_exceed_send_limits(self, notify_db_session, mocker): + service = create_service(message_limit=9) + template = create_template(service=service) + job = create_job(template=template, 
notification_count=10, original_file_name="multiple_sms.csv") + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + mocker.patch("app.celery.tasks.process_rows") -def test_should_send_template_to_correct_sms_task_and_persist(sample_template_with_placeholders, mocker): - notification = _notification_json( - sample_template_with_placeholders, - to="+1 650 253 2222", - personalisation={"name": "Jo"}, - ) + process_job(job.id) - mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == "sending limits exceeded" + assert s3.get_job_from_s3.called is False + assert tasks.process_rows.called is False - save_sms( - sample_template_with_placeholders.service_id, - uuid.uuid4(), - encryption.encrypt(notification), - ) + def test_should_not_process_sms_job_if_would_exceed_send_limits_inc_today(self, notify_db_session, mocker): + service = create_service(message_limit=1) + template = create_template(service=service) + job = create_job(template=template) - persisted_notification = Notification.query.one() - assert persisted_notification.to == "+1 650 253 2222" - assert persisted_notification.template_id == sample_template_with_placeholders.id - assert persisted_notification.template_version == sample_template_with_placeholders.version - assert persisted_notification.status == "created" - assert persisted_notification.created_at <= datetime.utcnow() - assert not persisted_notification.sent_at - assert not persisted_notification.sent_by - assert not persisted_notification.job_id - assert persisted_notification.personalisation == {"name": "Jo"} - assert persisted_notification._personalisation == encryption.encrypt({"name": "Jo"}) - assert persisted_notification.notification_type == "sms" - mocked_deliver_sms.assert_called_once_with([str(persisted_notification.id)], queue="send-sms-tasks") - - -@pytest.mark.parametrize("sender_id", [None, "996958a8-0c06-43be-a40e-56e4a2d1655c"]) -def test_save_sms_should_use_redis_cache_to_retrieve_service_and_template_when_possible( - sample_template_with_placeholders, mocker, sender_id -): - notification = _notification_json( - sample_template_with_placeholders, - to="+1 650 253 2222", - personalisation={"name": "Jo"}, - ) + save_notification(create_notification(template=template, job=job)) - sms_sender = ServiceSmsSender() - sms_sender.sms_sender = "+16502532222" - - mocked_get_sender_id = mocker.patch("app.celery.tasks.dao_get_service_sms_senders_by_id", return_value=sms_sender) - celery_task = "deliver_throttled_sms" if sender_id else "deliver_sms" - mocked_deliver_sms = mocker.patch(f"app.celery.provider_tasks.{celery_task}.apply_async") - json_template_date = {"data": template_schema.dump(sample_template_with_placeholders).data} - json_service_data = {"data": service_schema.dump(sample_template_with_placeholders.service).data} - mocked_redis_get = mocker.patch.object(redis_store, "get") - - mocked_redis_get.side_effect = [ - bytes(json.dumps(json_service_data, default=lambda o: hex if isinstance(o, uuid.UUID) else None), encoding="utf-8"), - bytes(json.dumps(json_template_date, default=lambda o: o.hex if isinstance(o, uuid.UUID) else None), encoding="utf-8"), - False, - ] - - save_sms(sample_template_with_placeholders.service_id, uuid.uuid4(), encryption.encrypt(notification), sender_id) - - assert mocked_redis_get.called - persisted_notification = Notification.query.one() - assert persisted_notification.to == 
"+1 650 253 2222" - assert persisted_notification.template_id == sample_template_with_placeholders.id - assert persisted_notification.template_version == sample_template_with_placeholders.version - assert persisted_notification.status == "created" - assert persisted_notification.created_at <= datetime.utcnow() - assert not persisted_notification.sent_at - assert not persisted_notification.sent_by - assert not persisted_notification.job_id - assert persisted_notification.personalisation == {"name": "Jo"} - assert persisted_notification._personalisation == encryption.encrypt({"name": "Jo"}) - assert persisted_notification.notification_type == "sms" - mocked_deliver_sms.assert_called_once_with( - [str(persisted_notification.id)], queue="send-throttled-sms-tasks" if sender_id else "send-sms-tasks" - ) - if sender_id: - mocked_get_sender_id.assert_called_once_with(persisted_notification.service_id, sender_id) + mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=load_example_csv("sms")) + mocker.patch("app.celery.tasks.process_rows") + process_job(job.id) -@pytest.mark.parametrize("sender_id", [None, "996958a8-0c06-43be-a40e-56e4a2d1655c"]) -def test_save_email_should_use_redis_cache_to_retrieve_service_and_template_when_possible(sample_service, mocker, sender_id): - sample_template = create_template( - template_name="Test Template", - template_type="email", - content="Hello (( Name))\nYour thing is due soon", - service=sample_service, - ) + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == "sending limits exceeded" + assert s3.get_job_from_s3.called is False + assert tasks.process_rows.called is False - notification = _notification_json( - sample_template, - to="test@unittest.com", - personalisation={"name": "Jo"}, - ) + @pytest.mark.parametrize("template_type", ["sms", "email"]) + def test_should_not_process_email_job_if_would_exceed_send_limits_inc_today(self, notify_db_session, template_type, mocker): + service = create_service(message_limit=1) + template = create_template(service=service, template_type=template_type) + job = create_job(template=template) - reply_to = ServiceEmailReplyTo() - reply_to.email_address = "notify@digital.cabinet-office.gov.uk" - mocked_get_sender_id = mocker.patch("app.celery.tasks.dao_get_reply_to_by_id", return_value=reply_to) - mocked_deliver_email = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - - json_template_date = {"data": template_schema.dump(sample_template).data} - json_service_data = {"data": service_schema.dump(sample_service).data} - mocked_redis_get = mocker.patch.object(redis_store, "get") - - mocked_redis_get.side_effect = [ - bytes(json.dumps(json_service_data, default=lambda o: hex if isinstance(o, uuid.UUID) else None), encoding="utf-8"), - bytes(json.dumps(json_template_date, default=lambda o: o.hex if isinstance(o, uuid.UUID) else None), encoding="utf-8"), - False, - ] - - save_email(sample_template.service_id, uuid.uuid4(), encryption.encrypt(notification), sender_id) - - assert mocked_redis_get.called - persisted_notification = Notification.query.one() - assert persisted_notification.to == "test@unittest.com" - assert persisted_notification.template_id == sample_template.id - assert persisted_notification.template_version == sample_template.version - assert persisted_notification.status == "created" - assert persisted_notification.created_at <= datetime.utcnow() - assert not persisted_notification.sent_at - assert not persisted_notification.sent_by - assert not 
persisted_notification.job_id - assert persisted_notification.personalisation == {"name": "Jo"} - assert persisted_notification._personalisation == encryption.encrypt({"name": "Jo"}) - assert persisted_notification.notification_type == "email" - mocked_deliver_email.assert_called_once_with([str(persisted_notification.id)], queue="send-email-tasks") - if sender_id: - mocked_get_sender_id.assert_called_once_with(persisted_notification.service_id, sender_id) - - -def test_should_put_save_sms_task_in_research_mode_queue_if_research_mode_service(notify_db, notify_db_session, mocker): - service = create_service( - research_mode=True, - ) + save_notification(create_notification(template=template, job=job)) - template = create_template(service=service) + mocker.patch("app.celery.tasks.s3.get_job_from_s3") + mocker.patch("app.celery.tasks.process_rows") - notification = _notification_json(template, to="+1 650 253 2222") + process_job(job.id) - mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == "sending limits exceeded" + assert s3.get_job_from_s3.called is False + assert tasks.process_rows.called is False - notification_id = uuid.uuid4() + def test_should_not_process_job_if_already_pending(self, sample_template, mocker): + job = create_job(template=sample_template, job_status="scheduled") - save_sms( - template.service_id, - notification_id, - encryption.encrypt(notification), - ) - persisted_notification = Notification.query.one() - provider_tasks.deliver_sms.apply_async.assert_called_once_with([str(persisted_notification.id)], queue="research-mode-tasks") - assert mocked_deliver_sms.called + mocker.patch("app.celery.tasks.s3.get_job_from_s3") + mocker.patch("app.celery.tasks.process_rows") + process_job(job.id) -@pytest.mark.parametrize("process_type", ["priority", "bulk"]) -def test_should_route_save_sms_task_to_appropriate_queue_according_to_template_process_type( - notify_db, notify_db_session, mocker, process_type -): - service = create_service() - template = create_template(service=service, process_type=process_type) - notification = _notification_json(template, to="+1 650 253 2222") + assert s3.get_job_from_s3.called is False + assert tasks.process_rows.called is False - mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + def test_should_process_email_job_if_exactly_on_send_limits(self, notify_db_session, mocker): + service = create_service(message_limit=10) + template = create_template(service=service, template_type="email") + job = create_job(template=template, notification_count=10) - notification_id = uuid.uuid4() + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_email"), + ) + mocker.patch("app.celery.tasks.save_emails.apply_async") + mocker.patch("app.signer_notification.sign", return_value="something_encrypted") + mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") + + process_job(job.id) + + s3.get_job_from_s3.assert_called_once_with(str(job.service.id), str(job.id)) + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == "in progress" + tasks.save_emails.apply_async.assert_called_with( + ( + str(job.service_id), + [ + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + ], 
+ None, + ), + queue="-normal-database-tasks", + ) - save_sms( - template.service_id, - notification_id, - encryption.encrypt(notification), - ) - persisted_notification = Notification.query.one() - provider_tasks.deliver_sms.apply_async.assert_called_once_with( - [str(persisted_notification.id)], queue=f"{process_type}-tasks" - ) - assert mocked_deliver_sms.called + def test_should_process_smss_job(self, notify_db_session, mocker): + service = create_service(message_limit=20) + template = create_template(service=service) + job = create_job(template=template, notification_count=10, original_file_name="multiple_sms.csv") + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + mocker.patch("app.celery.tasks.save_smss.apply_async") + mocker.patch("app.signer_notification.sign", return_value="something_encrypted") + redis_mock = mocker.patch("app.celery.tasks.statsd_client.timing_with_dates") + process_job(job.id) -def test_should_route_save_sms_task_to_bulk_on_large_csv_file(notify_db, notify_db_session, mocker): - service = create_service() - template = create_template(service=service, process_type="normal") - notification = _notification_json(template, to="+1 650 253 2222", queue="bulk-tasks") + s3.get_job_from_s3.assert_called_once_with(str(job.service.id), str(job.id)) - mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + assert signer_notification.sign.call_args[0][0]["to"] == "+441234123120" + assert signer_notification.sign.call_args[0][0]["template"] == str(template.id) + assert signer_notification.sign.call_args[0][0]["template_version"] == template.version + assert signer_notification.sign.call_args[0][0]["personalisation"] == { + "phonenumber": "+441234123120", + } + tasks.save_smss.apply_async.assert_called_once_with( + ( + str(job.service_id), + [ + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + "something_encrypted", + ], + None, + ), + queue="-normal-database-tasks", + ) + job = jobs_dao.dao_get_job_by_id(job.id) + assert job.job_status == "in progress" + assert job.processing_started is not None + assert job.created_at is not None + redis_mock.assert_called_once_with("job.processing-start-delay", job.processing_started, job.created_at) + + def test_should_not_create_save_task_for_empty_file(self, sample_job, mocker): + mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=load_example_csv("empty")) + mocker.patch("app.celery.tasks.save_smss.apply_async") + + process_job(sample_job.id) + + s3.get_job_from_s3.assert_called_once_with(str(sample_job.service.id), str(sample_job.id)) + job = jobs_dao.dao_get_job_by_id(sample_job.id) + assert job.job_status == "in progress" + assert tasks.save_smss.apply_async.called is False + + def test_should_process_email_job(self, email_job_with_placeholders, mocker): + email_csv = """email_address,name + test@test.com,foo + """ + mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=email_csv) + mocker.patch("app.celery.tasks.save_emails.apply_async") + mocker.patch("app.signer_notification.sign", return_value="something_encrypted") + mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") + redis_mock = mocker.patch("app.celery.tasks.statsd_client.timing_with_dates") + + process_job(email_job_with_placeholders.id) + + 
s3.get_job_from_s3.assert_called_once_with( + str(email_job_with_placeholders.service.id), str(email_job_with_placeholders.id) + ) + assert signer_notification.sign.call_args[0][0]["to"] == "test@test.com" + assert signer_notification.sign.call_args[0][0]["template"] == str(email_job_with_placeholders.template.id) + assert signer_notification.sign.call_args[0][0]["template_version"] == email_job_with_placeholders.template.version + assert signer_notification.sign.call_args[0][0]["personalisation"] == { + "emailaddress": "test@test.com", + "name": "foo", + } + tasks.save_emails.apply_async.assert_called_once_with( + (str(email_job_with_placeholders.service_id), ["something_encrypted"], None), + queue="-normal-database-tasks", + ) + job = jobs_dao.dao_get_job_by_id(email_job_with_placeholders.id) + assert job.job_status == "in progress" + assert job.processing_started is not None + assert job.created_at is not None + redis_mock.assert_called_once_with("job.processing-start-delay", job.processing_started, job.created_at) + + def test_should_process_emails_job(self, email_job_with_placeholders, mocker): + email_csv = """email_address,name + test@test.com,foo + YOLO@test2.com,foo2 + yolo2@test2.com,foo3 + yolo3@test3.com,foo4 + """ + mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=email_csv) + mocker.patch("app.celery.tasks.save_emails.apply_async") + mocker.patch("app.signer_notification.sign", return_value="something_encrypted") + redis_mock = mocker.patch("app.celery.tasks.statsd_client.timing_with_dates") + + process_job(email_job_with_placeholders.id) + + s3.get_job_from_s3.assert_called_once_with( + str(email_job_with_placeholders.service.id), str(email_job_with_placeholders.id) + ) - notification_id = uuid.uuid4() + assert signer_notification.sign.call_args[0][0]["to"] == "yolo3@test3.com" + assert signer_notification.sign.call_args[0][0]["template"] == str(email_job_with_placeholders.template.id) + assert signer_notification.sign.call_args[0][0]["template_version"] == email_job_with_placeholders.template.version + assert signer_notification.sign.call_args[0][0]["personalisation"] == { + "emailaddress": "yolo3@test3.com", + "name": "foo4", + } + tasks.save_emails.apply_async.assert_called_once_with( + ( + str(email_job_with_placeholders.service_id), + ["something_encrypted", "something_encrypted", "something_encrypted", "something_encrypted"], + None, + ), + queue="-normal-database-tasks", + ) + job = jobs_dao.dao_get_job_by_id(email_job_with_placeholders.id) + assert job.job_status == "in progress" + assert job.processing_started is not None + assert job.created_at is not None + redis_mock.assert_called_once_with("job.processing-start-delay", job.processing_started, job.created_at) + + def test_should_process_email_job_with_sender_id(self, sample_email_template, mocker, fake_uuid): + email_csv = """email_address,name + test@test.com,foo + """ + job = create_job(template=sample_email_template, sender_id=fake_uuid) + mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=email_csv) + mocker.patch("app.celery.tasks.save_emails.apply_async") + mocker.patch("app.signer_notification.sign", return_value="something_encrypted") + mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") + + process_job(job.id) + + tasks.save_emails.apply_async.assert_called_once_with( + (str(job.service_id), ["something_encrypted"], None), queue="-normal-database-tasks" + ) - save_sms( - template.service_id, - notification_id, - encryption.encrypt(notification), - ) - 
persisted_notification = Notification.query.one() - provider_tasks.deliver_sms.apply_async.assert_called_once_with([str(persisted_notification.id)], queue="bulk-tasks") - assert mocked_deliver_sms.called + @pytest.mark.skip(reason="the code paths don't exist for letter implementation") + @freeze_time("2016-01-01 11:09:00.061258") + def test_should_process_letter_job(self, sample_letter_job, mocker): + csv = """address_line_1,address_line_2,address_line_3,address_line_4,postcode,name + A1,A2,A3,A4,A_POST,Alice + """ + s3_mock = mocker.patch("app.celery.tasks.s3.get_job_from_s3", return_value=csv) + process_row_mock = mocker.patch("app.celery.tasks.process_row") + mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") + + process_job(sample_letter_job.id) + + s3_mock.assert_called_once_with(str(sample_letter_job.service.id), str(sample_letter_job.id)) + + row_call = process_row_mock.mock_calls[0][1] + assert row_call[0].index == 0 + assert row_call[0].recipient == ["A1", "A2", "A3", "A4", None, None, "A_POST"] + assert row_call[0].personalisation == { + "addressline1": "A1", + "addressline2": "A2", + "addressline3": "A3", + "addressline4": "A4", + "postcode": "A_POST", + } + assert row_call[2] == sample_letter_job + assert row_call[3] == sample_letter_job.service + assert process_row_mock.call_count == 1 -def test_should_route_save_sms_task_to_throttled_queue_on_large_csv_file_if_custom_sms_sender( - notify_db, notify_db_session, mocker -): - service = create_service_with_defined_sms_sender(sms_sender_value="3433061234") - template = create_template(service=service, process_type="normal") - notification = _notification_json(template, to="+1 650 253 2222", queue="bulk-tasks") + assert sample_letter_job.job_status == "finished" - mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_throttled_sms.apply_async") - mocked_deliver_throttled_sms = mocker.patch("app.celery.provider_tasks.deliver_throttled_sms.apply_async") + def test_should_process_all_sms_job(self, sample_job_with_placeholdered_template, mocker): + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + mocker.patch("app.celery.tasks.save_smss.apply_async") + mocker.patch("app.signer_notification.sign", return_value="something_encrypted") + mocker.patch("app.celery.tasks.create_uuid", return_value="uuid") - notification_id = uuid.uuid4() + process_job(sample_job_with_placeholdered_template.id) - save_sms( - template.service_id, - notification_id, - encryption.encrypt(notification), - ) + s3.get_job_from_s3.assert_called_once_with( + str(sample_job_with_placeholdered_template.service.id), + str(sample_job_with_placeholdered_template.id), + ) + assert signer_notification.sign.call_args[0][0]["to"] == "+441234123120" + assert signer_notification.sign.call_args[0][0]["template"] == str(sample_job_with_placeholdered_template.template.id) + assert ( + signer_notification.sign.call_args[0][0]["template_version"] + == sample_job_with_placeholdered_template.template.version + ) # noqa + assert signer_notification.sign.call_args[0][0]["personalisation"] == { + "phonenumber": "+441234123120", + "name": "chris", + } + assert tasks.save_smss.apply_async.call_count == 1 + job = jobs_dao.dao_get_job_by_id(sample_job_with_placeholdered_template.id) + assert job.job_status == "in progress" + + def test_should_cancel_job_if_service_is_inactive(self, sample_service, sample_job, mocker): + sample_service.active = False + + 
mocker.patch("app.celery.tasks.s3.get_job_from_s3") + mocker.patch("app.celery.tasks.process_rows") + + process_job(sample_job.id) + + job = jobs_dao.dao_get_job_by_id(sample_job.id) + assert job.job_status == "cancelled" + s3.get_job_from_s3.assert_not_called() + tasks.process_rows.assert_not_called() + + +class TestProcessRows: + @pytest.mark.parametrize( + "template_type, research_mode, expected_function, expected_queue, api_key_id, sender_id, reference", + [ + (SMS_TYPE, False, "save_smss", "-normal-database-tasks", None, None, None), + (SMS_TYPE, True, "save_smss", "research-mode-tasks", uuid.uuid4(), uuid.uuid4(), "ref1"), + (EMAIL_TYPE, False, "save_emails", "-normal-database-tasks", uuid.uuid4(), uuid.uuid4(), "ref2"), + (EMAIL_TYPE, True, "save_emails", "research-mode-tasks", None, None, None), + ], + ) + def test_process_rows_sends_save_task( + self, + notify_api, + template_type, + research_mode, + expected_function, + expected_queue, + api_key_id, + sender_id, + reference, + mocker, + ): + mocker.patch("app.celery.tasks.create_uuid", return_value="noti_uuid") + task_mock = mocker.patch("app.celery.tasks.{}".format(expected_function)) + signer_mock = mocker.patch("app.celery.tasks.signer_notification.sign") + template = MagicMock(id="template_id", template_type=template_type, process_type=NORMAL) + job = Mock(id="job_id", template_version="temp_vers", notification_count=1, api_key_id=api_key_id, sender_id=sender_id) + service = Mock(id="service_id", research_mode=research_mode) + template.__len__.return_value = 1 + + process_rows( + [ + Row( + {"foo": "bar", "to": "recip", "reference": reference} if reference else {"foo": "bar", "to": "recip"}, + index="row_num", + error_fn=lambda k, v: None, + recipient_column_headers=["to"], + placeholders={"foo"}, + template=template, + ) + ], + template, + job, + service, + ) + signer_mock.assert_called_once_with( + { + "api_key": None if api_key_id is None else str(api_key_id), + "key_type": job.api_key.key_type, + "template": "template_id", + "template_version": "temp_vers", + "job": "job_id", + "to": "recip", + "row_number": "row_num", + "personalisation": {"foo": "bar"}, + "queue": QueueNames.SEND_SMS_MEDIUM if template_type == SMS_TYPE else QueueNames.SEND_EMAIL_MEDIUM, + "client_reference": reference, + "sender_id": str(sender_id) if sender_id else None, + }, + ) + task_mock.apply_async.assert_called_once() + + @pytest.mark.parametrize( + "csv_bulk_threshold, template_process_type, expected_queue", + [ + (1_000, PRIORITY, QueueNames.SEND_EMAIL_HIGH), # keep priority when no thresholds are met + (1, PRIORITY, QueueNames.SEND_EMAIL_LOW), # autoswitch to bulk queue if bulk threshold is met, even if in priority. + (1, NORMAL, QueueNames.SEND_EMAIL_LOW), # autoswitch to bulk queue if bulk threshold is met. + (1_000, NORMAL, QueueNames.SEND_EMAIL_MEDIUM), # keep normal priority + (1, BULK, QueueNames.SEND_EMAIL_LOW), # keep bulk priority + (1_000, BULK, QueueNames.SEND_EMAIL_MEDIUM), # autoswitch to normal queue if normal threshold is met. 
+ ], + ) + def test_should_redirect_email_job_to_queue_depending_on_csv_threshold( + self, + notify_api, + sample_job, + mocker, + fake_uuid, + csv_bulk_threshold, + template_process_type, + expected_queue, + ): + mock_save_email = mocker.patch("app.celery.tasks.save_emails") + + template = MagicMock(id=1, template_type=EMAIL_TYPE, process_type=template_process_type) + api_key = Mock(id=1, key_type=KEY_TYPE_NORMAL) + job = Mock(id=1, template_version="temp_vers", notification_count=1, api_key=api_key) + service = Mock(id=1, research_mode=False) + template.__len__.return_value = 1 + + row = next( + RecipientCSV( + load_example_csv("email"), + template_type=EMAIL_TYPE, + ).get_rows() + ) - persisted_notification = Notification.query.one() - provider_tasks.deliver_throttled_sms.apply_async.assert_called_once_with( - [str(persisted_notification.id)], queue="send-throttled-sms-tasks" - ) - mocked_deliver_sms.assert_not_called() - mocked_deliver_throttled_sms.assert_called_once() + with set_config_values(notify_api, {"CSV_BULK_REDIRECT_THRESHOLD": csv_bulk_threshold}): + process_rows([row], template, job, service) + + tasks.save_emails.apply_async.assert_called_once() + args = mock_save_email.method_calls[0].args + signed_notification = [i for i in args[0]][1][0] + notification = signer_notification.verify(signed_notification) + assert expected_queue == notification.get("queue") + + @pytest.mark.parametrize( + "csv_bulk_threshold, template_process_type, expected_queue", + [ + (1_000, PRIORITY, QueueNames.SEND_SMS_HIGH), # keep priority when no thresholds are met + (1, PRIORITY, QueueNames.SEND_SMS_LOW), # autoswitch to bulk queue if bulk threshold is met, even if in priority. + (1, NORMAL, QueueNames.SEND_SMS_LOW), # autoswitch to bulk queue if bulk threshold is met. + (1_000, NORMAL, QueueNames.SEND_SMS_MEDIUM), # keep normal priority + (1, BULK, QueueNames.SEND_SMS_LOW), # keep bulk priority + (1_000, BULK, QueueNames.SEND_SMS_MEDIUM), # autoswitch to normal queue if normal threshold is met. 
+ ], + ) + def test_should_redirect_sms_job_to_queue_depending_on_csv_threshold( + self, + notify_api, + sample_job, + mocker, + fake_uuid, + csv_bulk_threshold, + template_process_type, + expected_queue, + ): + mock_save_sms = mocker.patch("app.celery.tasks.save_smss") + + template = MagicMock(id=1, template_type=SMS_TYPE, process_type=template_process_type) + api_key = Mock(id=1, key_type=KEY_TYPE_NORMAL) + job = Mock(id=1, template_version="temp_vers", notification_count=1, api_key=api_key) + service = Mock(id=1, research_mode=False) + template.__len__.return_value = 1 + + row = next( + RecipientCSV( + load_example_csv("sms"), + template_type=SMS_TYPE, + ).get_rows() + ) + with set_config_values(notify_api, {"CSV_BULK_REDIRECT_THRESHOLD": csv_bulk_threshold}): + process_rows([row], template, job, service) + + tasks.save_smss.apply_async.assert_called_once() + args = mock_save_sms.method_calls[0].args + signed_notification = [i for i in args[0]][1][0] + notification = signer_notification.verify(signed_notification) + assert expected_queue == notification.get("queue") + + def test_should_not_save_sms_if_restricted_service_and_invalid_number(self, notify_db_session, mocker): + user = create_user(mobile_number="6502532222") + service = create_service(user=user, restricted=True) + template = create_template(service=service) + job = create_job(template) + notification = _notification_json(template, to="07700 900849") + + save_sms_mock = mocker.patch("app.celery.tasks.save_smss") + + process_rows( + [ + Row( + {"foo": "bar", "to": notification["to"]}, + index="row_num", + error_fn=lambda k, v: None, + recipient_column_headers=["to"], + placeholders={"foo"}, + template=SMSMessageTemplate(template.__dict__), + ) + ], + template, + job, + service, + ) -def test_should_save_sms_if_restricted_service_and_valid_number(notify_db_session, mocker): - user = create_user(mobile_number="6502532222") - service = create_service(user=user, restricted=True) - template = create_template(service=service) - notification = _notification_json(template, "+16502532222") + assert not save_sms_mock.called + + @pytest.mark.parametrize( + "template_type, research_mode, expected_function, expected_queue, api_key_id, sender_id, reference", + [ + (SMS_TYPE, False, "save_smss", "-normal-database-tasks", None, None, None), + (SMS_TYPE, True, "save_smss", "research-mode-tasks", uuid.uuid4(), uuid.uuid4(), "ref1"), + (EMAIL_TYPE, False, "save_emails", "-normal-database-tasks", uuid.uuid4(), uuid.uuid4(), "ref2"), + (EMAIL_TYPE, True, "save_emails", "research-mode-tasks", None, None, None), + ], + ) + def test_process_rows_works_without_key_type( + self, + notify_api, + template_type, + research_mode, + expected_function, + expected_queue, + api_key_id, + sender_id, + reference, + mocker, + ): + mocker.patch("app.celery.tasks.create_uuid", return_value="noti_uuid") + task_mock = mocker.patch("app.celery.tasks.{}".format(expected_function)) + signer_mock = mocker.patch("app.celery.tasks.signer_notification.sign") + template = MagicMock(id="template_id", template_type=template_type, process_type=NORMAL) + template.__len__.return_value = 1 + api_key = {} + job = Mock( + id="job_id", + template_version="temp_vers", + notification_count=1, + api_key_id=api_key_id, + sender_id=sender_id, + api_key=api_key, + ) + service = Mock(id="service_id", research_mode=research_mode) + + process_rows( + [ + Row( + {"foo": "bar", "to": "recip", "reference": reference} if reference else {"foo": "bar", "to": "recip"}, + index="row_num", + 
error_fn=lambda k, v: None, + recipient_column_headers=["to"], + placeholders={"foo"}, + template=template, + ) + ], + template, + job, + service, + ) + signer_mock.assert_called_once_with( + { + "api_key": None if api_key_id is None else str(api_key_id), + "key_type": KEY_TYPE_NORMAL, + "template": "template_id", + "template_version": "temp_vers", + "job": "job_id", + "to": "recip", + "row_number": "row_num", + "personalisation": {"foo": "bar"}, + "queue": QueueNames.SEND_SMS_MEDIUM if template_type == SMS_TYPE else QueueNames.SEND_EMAIL_MEDIUM, + "sender_id": str(sender_id) if sender_id else None, + "client_reference": reference, + }, + ) + task_mock.apply_async.assert_called_once() - mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - notification_id = uuid.uuid4() - encrypt_notification = encryption.encrypt(notification) - save_sms( - service.id, - notification_id, - encrypt_notification, - ) +class TestSaveSmss: + def test_should_send_template_to_correct_sms_task_and_persist(self, sample_template_with_placeholders, mocker): + notification = _notification_json( + sample_template_with_placeholders, + to="+1 650 253 2222", + personalisation={"name": "Jo"}, + ) - persisted_notification = Notification.query.one() - assert persisted_notification.to == "+16502532222" - assert persisted_notification.template_id == template.id - assert persisted_notification.template_version == template.version - assert persisted_notification.status == "created" - assert persisted_notification.created_at <= datetime.utcnow() - assert not persisted_notification.sent_at - assert not persisted_notification.sent_by - assert not persisted_notification.job_id - assert not persisted_notification.personalisation - assert persisted_notification.notification_type == "sms" - provider_tasks.deliver_sms.apply_async.assert_called_once_with([str(persisted_notification.id)], queue="send-sms-tasks") - - -def test_save_email_should_save_default_email_reply_to_text_on_notification(notify_db_session, mocker): - service = create_service() - create_reply_to_email(service=service, email_address="reply_to@digital.gov.uk", is_default=True) - template = create_template(service=service, template_type="email", subject="Hello") - - notification = _notification_json(template, to="test@example.com") - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - - notification_id = uuid.uuid4() - save_email( - service.id, - notification_id, - encryption.encrypt(notification), - ) + mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - persisted_notification = Notification.query.one() - assert persisted_notification.reply_to_text == "reply_to@digital.gov.uk" + save_smss( + sample_template_with_placeholders.service_id, + [signer_notification.sign(notification)], + uuid.uuid4(), + ) + persisted_notification = Notification.query.one() + assert persisted_notification.to == "+1 650 253 2222" + assert persisted_notification.template_id == sample_template_with_placeholders.id + assert persisted_notification.template_version == sample_template_with_placeholders.version + assert persisted_notification.status == "created" + assert persisted_notification.created_at <= datetime.utcnow() + assert not persisted_notification.sent_at + assert not persisted_notification.sent_by + assert not persisted_notification.job_id + assert persisted_notification.personalisation == {"name": "Jo"} + assert persisted_notification._personalisation == signer_personalisation.sign({"name": "Jo"}) + assert 
persisted_notification.notification_type == "sms" + mocked_deliver_sms.assert_called_once_with([str(persisted_notification.id)], queue=QueueNames.SEND_SMS_MEDIUM) + + @pytest.mark.parametrize("sender_id", [None, "996958a8-0c06-43be-a40e-56e4a2d1655c"]) + def test_save_sms_should_use_redis_cache_to_retrieve_service_and_template_when_possible( + self, sample_template_with_placeholders, mocker, sample_service, sender_id + ): + notification = _notification_json( + sample_template_with_placeholders, + to="+1 650 253 2222", + personalisation={"name": "Jo"}, + ) + if sender_id: + notification["sender_id"] = sender_id + + sms_sender = ServiceSmsSender() + sms_sender.sms_sender = "6135550123" + mocked_get_sender_id = mocker.patch("app.celery.tasks.dao_get_service_sms_senders_by_id", return_value=sms_sender) + celery_task = "deliver_throttled_sms" if sender_id else "deliver_sms" + mocked_deliver_sms = mocker.patch(f"app.celery.provider_tasks.{celery_task}.apply_async") + json_template_date = {"data": template_schema.dump(sample_template_with_placeholders)} + json_service_data = {"data": service_schema.dump(sample_service)} + mocked_redis_get = mocker.patch.object(redis_store, "get") + + mocked_redis_get.side_effect = [ + bytes(json.dumps(json_service_data, default=lambda o: o.hex if isinstance(o, uuid.UUID) else None), encoding="utf-8"), + bytes( + json.dumps(json_template_date, default=lambda o: o.hex if isinstance(o, uuid.UUID) else None), encoding="utf-8" + ), + bytes( + json.dumps(json_template_date, default=lambda o: o.hex if isinstance(o, uuid.UUID) else None), encoding="utf-8" + ), + bytes(json.dumps(json_service_data, default=lambda o: o.hex if isinstance(o, uuid.UUID) else None), encoding="utf-8"), + False, + False, + False, + ] + mocker.patch("app.notifications.process_notifications.choose_queue", return_value="sms_queue") + save_smss(sample_template_with_placeholders.service_id, [signer_notification.sign(notification)], uuid.uuid4()) + + assert mocked_redis_get.called + persisted_notification = Notification.query.one() + assert persisted_notification.to == "+1 650 253 2222" + assert persisted_notification.template_id == sample_template_with_placeholders.id + assert persisted_notification.template_version == sample_template_with_placeholders.version + assert persisted_notification.status == "created" + assert persisted_notification.created_at <= datetime.utcnow() + assert not persisted_notification.sent_at + assert not persisted_notification.sent_by + assert not persisted_notification.job_id + assert persisted_notification.personalisation == {"name": "Jo"} + assert persisted_notification._personalisation == signer_personalisation.sign({"name": "Jo"}) + assert persisted_notification.notification_type == "sms" + assert persisted_notification.reply_to_text == (f"+1{sms_sender.sms_sender}" if sender_id else None) + + mocked_deliver_sms.assert_called_once_with( + [str(persisted_notification.id)], queue="send-throttled-sms-tasks" if sender_id else QueueNames.SEND_SMS_MEDIUM + ) + if sender_id: + mocked_get_sender_id.assert_called_once_with(persisted_notification.service_id, sender_id) -def test_save_sms_should_save_default_smm_sender_notification_reply_to_text_on(notify_db_session, mocker): - service = create_service_with_defined_sms_sender(sms_sender_value="12345") - template = create_template(service=service) + def test_should_put_save_sms_task_in_research_mode_queue_if_research_mode_service(self, notify_db, notify_db_session, mocker): + service = create_service( + research_mode=True, + ) - 
notification = _notification_json(template, to="6502532222") - mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + template = create_template(service=service) - notification_id = uuid.uuid4() - save_sms( - service.id, - notification_id, - encryption.encrypt(notification), - ) + notification = _notification_json(template, to="+1 650 253 2222") - persisted_notification = Notification.query.one() - assert persisted_notification.reply_to_text == "12345" + mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + notification_id = uuid.uuid4() -def test_should_put_save_email_task_in_research_mode_queue_if_research_mode_service(notify_db_session, mocker): - service = create_service(research_mode=True) + save_smss(template.service_id, [signer_notification.sign(notification)], notification_id) + persisted_notification = Notification.query.one() + provider_tasks.deliver_sms.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue="research-mode-tasks" + ) + assert mocked_deliver_sms.called - template = create_template(service=service, template_type="email") + @pytest.mark.parametrize("process_type", ["priority", "bulk"]) + def test_should_route_save_sms_task_to_appropriate_queue_according_to_template_process_type( + self, notify_db, notify_db_session, mocker, process_type + ): + service = create_service() + template = create_template(service=service, process_type=process_type) + notification = _notification_json(template, to="+1 650 253 2222") - notification = _notification_json(template, to="test@test.com") + mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + notification_id = uuid.uuid4() - notification_id = uuid.uuid4() + save_smss( + template.service_id, + [signer_notification.sign(notification)], + notification_id, + ) + persisted_notification = Notification.query.one() + if process_type == "priority": + provider_tasks.deliver_sms.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue=QueueNames.SEND_SMS_HIGH + ) + else: + provider_tasks.deliver_sms.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue=QueueNames.SEND_SMS_LOW + ) + assert mocked_deliver_sms.called - save_email( - template.service_id, - notification_id, - encryption.encrypt(notification), - ) + def test_should_route_save_sms_task_to_bulk_on_large_csv_file(self, notify_db, notify_db_session, mocker): + service = create_service() + template = create_template(service=service, process_type="normal") + notification = _notification_json(template, to="+1 650 253 2222", queue=QueueNames.SEND_SMS_LOW) - persisted_notification = Notification.query.one() - provider_tasks.deliver_email.apply_async.assert_called_once_with( - [str(persisted_notification.id)], queue="research-mode-tasks" - ) + mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + notification_id = uuid.uuid4() -@pytest.mark.parametrize("process_type", ["priority", "bulk"]) -def test_should_route_save_email_task_to_appropriate_queue_according_to_template_process_type( - notify_db_session, mocker, process_type -): - service = create_service() - template = create_template(service=service, template_type="email", process_type=process_type) - notification = _notification_json(template, to="test@test.com") + save_smss( + template.service_id, + [signer_notification.sign(notification)], + notification_id, + ) + 
persisted_notification = Notification.query.one()
+ provider_tasks.deliver_sms.apply_async.assert_called_once_with(
+ [str(persisted_notification.id)], queue=QueueNames.SEND_SMS_LOW
+ )
+ assert mocked_deliver_sms.called
- mocker.patch("app.celery.provider_tasks.deliver_email.apply_async")
+ def test_should_route_save_sms_task_to_throttled_queue_on_large_csv_file_if_custom_sms_sender(
+ self, notify_db, notify_db_session, mocker
+ ):
+ service = create_service_with_defined_sms_sender(sms_sender_value="3433061234")
+ template = create_template(service=service, process_type="normal")
+ notification = _notification_json(template, to="+1 650 253 2222", queue=QueueNames.SEND_SMS_LOW)
- notification_id = uuid.uuid4()
+ mocked_deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async")
+ mocked_deliver_throttled_sms = mocker.patch("app.celery.provider_tasks.deliver_throttled_sms.apply_async")
- save_email(
- template.service_id,
- notification_id,
- encryption.encrypt(notification),
- )
+ notification_id = uuid.uuid4()
- persisted_notification = Notification.query.one()
- provider_tasks.deliver_email.apply_async.assert_called_once_with(
- [str(persisted_notification.id)], queue=f"{process_type}-tasks"
- )
+ save_smss(template.service_id, [signer_notification.sign(notification)], notification_id)
+ persisted_notification = Notification.query.one()
+ provider_tasks.deliver_throttled_sms.apply_async.assert_called_once_with(
+ [str(persisted_notification.id)], queue="send-throttled-sms-tasks"
+ )
+ mocked_deliver_sms.assert_not_called()
+ mocked_deliver_throttled_sms.assert_called_once()
+
+ def test_should_save_sms_if_restricted_service_and_valid_number(self, notify_db_session, mocker):
+ user = create_user(mobile_number="6502532222")
+ service = create_service(user=user, restricted=True)
+ template = create_template(service=service)
+ notification = _notification_json(template, "+16502532222")
+
+ mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async")
+
+ notification_id = uuid.uuid4()
+ save_smss(template.service_id, [signer_notification.sign(notification)], notification_id)
+
+ persisted_notification = Notification.query.one()
+ assert persisted_notification.to == "+16502532222"
+ assert persisted_notification.template_id == template.id
+ assert persisted_notification.template_version == template.version
+ assert persisted_notification.status == "created"
+ assert persisted_notification.created_at <= datetime.utcnow()
+ assert not persisted_notification.sent_at
+ assert not persisted_notification.sent_by
+ assert not persisted_notification.job_id
+ assert not persisted_notification.personalisation
+ assert persisted_notification.notification_type == "sms"
+ provider_tasks.deliver_sms.apply_async.assert_called_once_with(
+ [str(persisted_notification.id)], queue=QueueNames.SEND_SMS_MEDIUM
+ )
-def test_should_route_save_email_task_to_bulk_on_large_csv_file(notify_db_session, mocker):
- service = create_service()
- template = create_template(service=service, template_type="email", process_type="normal")
- notification = _notification_json(template, to="test@test.com", queue="bulk-tasks")
+ def test_save_sms_should_save_default_sms_sender_notification_reply_to_text_on(self, notify_db_session, mocker):
+ service = create_service_with_defined_sms_sender(sms_sender_value="12345")
+ template = create_template(service=service)
+
+ notification = _notification_json(template, to="6502532222")
+ mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async")
+
+
notification_id = uuid.uuid4() + save_smss(template.service_id, [signer_notification.sign(notification)], notification_id) + + persisted_notification = Notification.query.one() + assert persisted_notification.reply_to_text == "12345" + + def test_should_save_sms_template_to_and_persist_with_job_id(self, notify_api, sample_job, mocker): + notification = _notification_json(sample_job.template, to="+1 650 253 2222", job_id=sample_job.id, row_number=2) + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + + notification_id = uuid.uuid4() + now = datetime.utcnow() + + save_smss(sample_job.template.service_id, [signer_notification.sign(notification)], notification_id) + persisted_notification = Notification.query.one() + assert persisted_notification.to == "+1 650 253 2222" + assert persisted_notification.job_id == sample_job.id + assert persisted_notification.template_id == sample_job.template.id + assert persisted_notification.status == "created" + assert not persisted_notification.sent_at + assert persisted_notification.created_at >= now + assert not persisted_notification.sent_by + assert persisted_notification.job_row_number == 2 + assert persisted_notification.api_key_id is None + assert persisted_notification.key_type == KEY_TYPE_NORMAL + assert persisted_notification.notification_type == "sms" + + provider_tasks.deliver_sms.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue=QueueNames.SEND_SMS_MEDIUM + ) - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + def test_save_sms_should_go_to_retry_queue_if_database_errors(self, sample_template, mocker): + notification = _notification_json(sample_template, "+1 650 253 2222") - notification_id = uuid.uuid4() + expected_exception = SQLAlchemyError() - save_email( - template.service_id, - notification_id, - encryption.encrypt(notification), - ) + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + mocker.patch("app.celery.tasks.save_smss.retry", side_effect=Retry) + mocker.patch("app.celery.tasks.save_smss.max_retries", return_value=4) + mocker.patch( + "app.notifications.process_notifications.bulk_insert_notifications", + side_effect=expected_exception, + ) - persisted_notification = Notification.query.one() - provider_tasks.deliver_email.apply_async.assert_called_once_with([str(persisted_notification.id)], queue="bulk-tasks") + notification_id = uuid.uuid4() + + with pytest.raises(Retry): + save_smss(sample_template.service_id, [signer_notification.sign(notification)], notification_id) + assert provider_tasks.deliver_sms.apply_async.called is False + tasks.save_smss.retry.assert_called_with(exc=expected_exception, queue="retry-tasks") + + assert Notification.query.count() == 0 + + def test_save_sms_does_not_send_duplicate_and_does_not_put_in_retry_queue(self, sample_notification, mocker): + json = _notification_json(sample_notification.template, "6502532222", job_id=uuid.uuid4(), row_number=1) + deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + retry = mocker.patch("app.celery.tasks.save_smss.retry", side_effect=Exception()) + notification_id = str(sample_notification.id) + json["id"] = str(sample_notification.id) + + save_smss(sample_notification.service_id, [signer_notification.sign(json)], notification_id) + assert Notification.query.count() == 1 + assert not deliver_sms.called + assert not retry.called + + def test_save_sms_uses_sms_sender_reply_to_text(self, mocker, notify_db_session): + service = 
create_service_with_defined_sms_sender(sms_sender_value="6502532222") + template = create_template(service=service) + + notification = _notification_json(template, to="6502532222") + mocker.patch("app.celery.provider_tasks.deliver_throttled_sms.apply_async") + + notification_id = uuid.uuid4() + save_smss(service.id, [signer_notification.sign(notification)], notification_id) + + persisted_notification = Notification.query.one() + assert persisted_notification.reply_to_text == "+16502532222" + + def test_save_sms_uses_non_default_sms_sender_reply_to_text_if_provided(self, mocker, notify_db_session): + service = create_service_with_defined_sms_sender(sms_sender_value="07123123123") + template = create_template(service=service) + new_sender = service_sms_sender_dao.dao_add_sms_sender_for_service(service.id, "new-sender", False) + + notification = _notification_json(template, to="6502532222") + notification["sender_id"] = str(new_sender.id) + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + + notification_id = uuid.uuid4() + save_smss(service.id, [signer_notification.sign(notification)], notification_id) + persisted_notification = Notification.query.one() + assert persisted_notification.reply_to_text == "new-sender" + + +class TestSaveErrorHandling: + def test_handler_send_1notification(self, sample_template, mocker): + n1 = _notification_json(sample_template, "+1 650 253 2222") + n1["notification_id"] = str(uuid.uuid4()) + service = dao_fetch_service_by_id(sample_template.service_id) + n1["service"] = service + n1["template_id"] = str(sample_template.id) + expected_exception = SQLAlchemyError() + + retry_func = mocker.patch("app.celery.tasks.save_smss.retry") + mocker.patch("app.celery.tasks.save_smss.apply_async", side_effect=handle_batch_error_and_forward) + mocker.patch( + "app.notifications.process_notifications.bulk_insert_notifications", + side_effect=expected_exception, + ) + receipt_id = uuid.uuid4() + + signed_notifications = [1] + verified_notifications = [n1] + signed_and_verified = list(zip(signed_notifications, verified_notifications)) + handle_batch_error_and_forward(save_smss, signed_and_verified, SMS_TYPE, expected_exception, receipt_id, sample_template) + retry_func.assert_called_with(exc=expected_exception, queue="retry-tasks") + + def test_handler_send_3notifications(self, sample_template, mocker): + n1 = _notification_json(sample_template, "+1 650 253 2222") + n2 = _notification_json(sample_template, "+1 234 456 7890") + n3 = _notification_json(sample_template, "+1 345 567 7890") + n1["notification_id"] = str(uuid.uuid4()) + n2["notification_id"] = str(uuid.uuid4()) + n3["notification_id"] = str(uuid.uuid4()) + service = dao_fetch_service_by_id(sample_template.service_id) + n1["service"] = service + n2["service"] = service + n3["service"] = service + n1["template_id"] = str(sample_template.id) + n2["template_id"] = str(sample_template.id) + n3["template_id"] = str(sample_template.id) + expected_exception = SQLAlchemyError() + + save_func = mocker.patch("app.celery.tasks.save_smss.apply_async") + + receipt_id = uuid.uuid4() + + signed_notifications = [1, 2, 3] + verified_notifications = [n1, n2, n3] + signed_and_verified = list(zip(signed_notifications, verified_notifications)) + handle_batch_error_and_forward(save_smss, signed_and_verified, SMS_TYPE, expected_exception, receipt_id, sample_template) + + assert save_func.call_count == 3 + assert save_func.call_args_list == [ + call((service.id, [1], None), queue="-normal-database-tasks"), + 
call((service.id, [2], None), queue="-normal-database-tasks"), + call((service.id, [3], None), queue="-normal-database-tasks"), + ] + + def test_should_forward_sms_on_error(self, sample_template_with_placeholders, mocker): + notification1 = _notification_json( + sample_template_with_placeholders, + to="+1 650 253 2221", + personalisation={"name": "Jo"}, + ) + notification1["id"] = str(uuid.uuid4()) + notification1["service_id"] = str(sample_template_with_placeholders.service.id) + expected_error = IntegrityError(None, None, None) + mock_persist_notifications = mocker.patch("app.celery.tasks.persist_notifications", side_effect=expected_error) + mock_save_sms = mocker.patch("app.celery.tasks.save_smss.retry") + mock_acknowldege = mocker.patch("app.sms_normal.acknowledge") + + receipt = uuid.uuid4() + notifications = [signer_notification.sign(notification1)] + + save_smss( + str(sample_template_with_placeholders.service.id), + notifications, + receipt, + ) -def test_should_save_sms_template_to_and_persist_with_job_id(sample_job, mocker): - notification = _notification_json(sample_job.template, to="+1 650 253 2222", job_id=sample_job.id, row_number=2) - mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - mock_over_daily_limit = mocker.patch("app.celery.tasks.check_service_over_daily_message_limit") + mock_persist_notifications.assert_called_once() + mock_save_sms.assert_called_with(queue="retry-tasks", exc=expected_error) + mock_acknowldege.assert_called_once_with(receipt) - notification_id = uuid.uuid4() - now = datetime.utcnow() - save_sms( - sample_job.service.id, - notification_id, - encryption.encrypt(notification), - ) - persisted_notification = Notification.query.one() - assert persisted_notification.to == "+1 650 253 2222" - assert persisted_notification.job_id == sample_job.id - assert persisted_notification.template_id == sample_job.template.id - assert persisted_notification.status == "created" - assert not persisted_notification.sent_at - assert persisted_notification.created_at >= now - assert not persisted_notification.sent_by - assert persisted_notification.job_row_number == 2 - assert persisted_notification.api_key_id is None - assert persisted_notification.key_type == KEY_TYPE_NORMAL - assert persisted_notification.notification_type == "sms" - - provider_tasks.deliver_sms.apply_async.assert_called_once_with([str(persisted_notification.id)], queue="send-sms-tasks") - mock_over_daily_limit.assert_called_once_with("normal", sample_job.service) - - -def test_should_use_email_template_and_persist(sample_email_template_with_placeholders, sample_api_key, mocker): - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - mock_over_daily_limit = mocker.patch("app.celery.tasks.check_service_over_daily_message_limit") - - now = datetime(2016, 1, 1, 11, 9, 0) - notification_id = uuid.uuid4() - - with freeze_time("2016-01-01 12:00:00.000000"): - notification = _notification_json( + def test_should_forward_email_on_error(self, sample_email_template_with_placeholders, mocker): + notification1 = _notification_json( sample_email_template_with_placeholders, - "my_email@my_email.com", - {"name": "Jo"}, - row_number=1, + to="test1@gmail.com", + personalisation={"name": "Jo"}, ) + notification1["id"] = str(uuid.uuid4()) + notification1["service_id"] = str(sample_email_template_with_placeholders.service.id) - with freeze_time("2016-01-01 11:10:00.00000"): - save_email( - sample_email_template_with_placeholders.service_id, - notification_id, - 
encryption.encrypt(notification), - ) - - persisted_notification = Notification.query.one() - assert persisted_notification.to == "my_email@my_email.com" - assert persisted_notification.template_id == sample_email_template_with_placeholders.id - assert persisted_notification.template_version == sample_email_template_with_placeholders.version - assert persisted_notification.created_at >= now - assert not persisted_notification.sent_at - assert persisted_notification.status == "created" - assert not persisted_notification.sent_by - assert persisted_notification.job_row_number == 1 - assert persisted_notification.personalisation == {"name": "Jo"} - assert persisted_notification._personalisation == encryption.encrypt({"name": "Jo"}) - assert persisted_notification.api_key_id is None - assert persisted_notification.key_type == KEY_TYPE_NORMAL - assert persisted_notification.notification_type == "email" - - provider_tasks.deliver_email.apply_async.assert_called_once_with([str(persisted_notification.id)], queue="send-email-tasks") - mock_over_daily_limit.assert_called_once_with("normal", sample_email_template_with_placeholders.service) - - -def test_save_email_should_use_template_version_from_job_not_latest(sample_email_template, mocker): - notification = _notification_json(sample_email_template, "my_email@my_email.com") - version_on_notification = sample_email_template.version - # Change the template - from app.dao.templates_dao import dao_get_template_by_id, dao_update_template - - sample_email_template.content = sample_email_template.content + " another version of the template" - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - dao_update_template(sample_email_template) - t = dao_get_template_by_id(sample_email_template.id) - assert t.version > version_on_notification - now = datetime.utcnow() - save_email( - sample_email_template.service_id, - uuid.uuid4(), - encryption.encrypt(notification), - ) - - persisted_notification = Notification.query.one() - assert persisted_notification.to == "my_email@my_email.com" - assert persisted_notification.template_id == sample_email_template.id - assert persisted_notification.template_version == version_on_notification - assert persisted_notification.created_at >= now - assert not persisted_notification.sent_at - assert persisted_notification.status == "created" - assert not persisted_notification.sent_by - assert persisted_notification.notification_type == "email" - provider_tasks.deliver_email.apply_async.assert_called_once_with([str(persisted_notification.id)], queue="send-email-tasks") - - -def test_should_use_email_template_subject_placeholders(sample_email_template_with_placeholders, mocker): - notification = _notification_json(sample_email_template_with_placeholders, "my_email@my_email.com", {"name": "Jo"}) - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - - notification_id = uuid.uuid4() - now = datetime.utcnow() - save_email( - sample_email_template_with_placeholders.service_id, - notification_id, - encryption.encrypt(notification), - ) - persisted_notification = Notification.query.one() - assert persisted_notification.to == "my_email@my_email.com" - assert persisted_notification.template_id == sample_email_template_with_placeholders.id - assert persisted_notification.status == "created" - assert persisted_notification.created_at >= now - assert not persisted_notification.sent_by - assert persisted_notification.personalisation == {"name": "Jo"} - assert not persisted_notification.reference - assert 
persisted_notification.notification_type == "email" - provider_tasks.deliver_email.apply_async.assert_called_once_with([str(persisted_notification.id)], queue="send-email-tasks") - - -def test_save_email_uses_the_reply_to_text_when_provided(sample_email_template, mocker): - notification = _notification_json(sample_email_template, "my_email@my_email.com") - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - - service = sample_email_template.service - notification_id = uuid.uuid4() - service_email_reply_to_dao.add_reply_to_email_address_for_service(service.id, "default@example.com", True) - other_email_reply_to = service_email_reply_to_dao.add_reply_to_email_address_for_service( - service.id, "other@example.com", False - ) - - save_email( - sample_email_template.service_id, - notification_id, - encryption.encrypt(notification), - sender_id=other_email_reply_to.id, - ) - persisted_notification = Notification.query.one() - assert persisted_notification.notification_type == "email" - assert persisted_notification.reply_to_text == "other@example.com" - - -def test_save_email_uses_the_default_reply_to_text_if_sender_id_is_none(sample_email_template, mocker): - notification = _notification_json(sample_email_template, "my_email@my_email.com") - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - - service = sample_email_template.service - notification_id = uuid.uuid4() - service_email_reply_to_dao.add_reply_to_email_address_for_service(service.id, "default@example.com", True) - - save_email( - sample_email_template.service_id, - notification_id, - encryption.encrypt(notification), - sender_id=None, - ) - persisted_notification = Notification.query.one() - assert persisted_notification.notification_type == "email" - assert persisted_notification.reply_to_text == "default@example.com" - - -def test_should_use_email_template_and_persist_without_personalisation(sample_email_template, mocker): - notification = _notification_json(sample_email_template, "my_email@my_email.com") - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + expected_error = IntegrityError(None, None, None) + mock_persist_notifications = mocker.patch("app.celery.tasks.persist_notifications", side_effect=expected_error) + mock_save_email = mocker.patch("app.celery.tasks.save_emails.retry") + mock_acknowldege = mocker.patch("app.email_normal.acknowledge") - notification_id = uuid.uuid4() + receipt = uuid.uuid4() + notifications = [signer_notification.sign(notification1)] - now = datetime.utcnow() - save_email( - sample_email_template.service_id, - notification_id, - encryption.encrypt(notification), - ) - persisted_notification = Notification.query.one() - assert persisted_notification.to == "my_email@my_email.com" - assert persisted_notification.template_id == sample_email_template.id - assert persisted_notification.created_at >= now - assert not persisted_notification.sent_at - assert persisted_notification.status == "created" - assert not persisted_notification.sent_by - assert not persisted_notification.personalisation - assert not persisted_notification.reference - assert persisted_notification.notification_type == "email" - provider_tasks.deliver_email.apply_async.assert_called_once_with([str(persisted_notification.id)], queue="send-email-tasks") - - -def test_save_sms_should_go_to_retry_queue_if_database_errors(sample_template, mocker): - notification = _notification_json(sample_template, "+1 650 253 2222") - - expected_exception = SQLAlchemyError() - - 
mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - mocker.patch("app.celery.tasks.save_sms.retry", side_effect=Retry) - mocker.patch( - "app.notifications.process_notifications.dao_create_notification", - side_effect=expected_exception, - ) - - notification_id = uuid.uuid4() - - with pytest.raises(Retry): - save_sms( - sample_template.service_id, - notification_id, - encryption.encrypt(notification), + save_emails( + str(sample_email_template_with_placeholders.service.id), + notifications, + receipt, ) - assert provider_tasks.deliver_sms.apply_async.called is False - tasks.save_sms.retry.assert_called_with(exc=expected_exception, queue="retry-tasks") - - assert Notification.query.count() == 0 - - -def test_save_email_should_go_to_retry_queue_if_database_errors(sample_email_template, mocker): - notification = _notification_json(sample_email_template, "test@example.gov.uk") - - expected_exception = SQLAlchemyError() - - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - mocker.patch("app.celery.tasks.save_email.retry", side_effect=Retry) - mocker.patch( - "app.notifications.process_notifications.dao_create_notification", - side_effect=expected_exception, - ) - notification_id = uuid.uuid4() - - with pytest.raises(Retry): - save_email( - sample_email_template.service_id, - notification_id, - encryption.encrypt(notification), + mock_persist_notifications.assert_called_once() + mock_save_email.assert_called_with(queue="retry-tasks", exc=expected_error) + mock_acknowldege.assert_called_once_with(receipt) + + +class TestSaveEmails: + @pytest.mark.parametrize("sender_id", [None, "996958a8-0c06-43be-a40e-56e4a2d1655c"]) + def test_save_emails_should_use_redis_cache_to_retrieve_service_and_template_when_possible( + self, sample_service, mocker, sender_id + ): + sample_template = create_template( + template_name="Test Template", + template_type="email", + content="Hello (( Name))\nYour thing is due soon", + service=sample_service, ) - assert not provider_tasks.deliver_email.apply_async.called - tasks.save_email.retry.assert_called_with(exc=expected_exception, queue="retry-tasks") - - assert Notification.query.count() == 0 + notification = _notification_json( + sample_template, + to="test@unittest.com", + personalisation={"name": "Jo"}, + ) -def test_save_email_does_not_send_duplicate_and_does_not_put_in_retry_queue(sample_notification, mocker): - json = _notification_json( - sample_notification.template, - sample_notification.to, - job_id=uuid.uuid4(), - row_number=1, - ) - deliver_email = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - retry = mocker.patch("app.celery.tasks.save_email.retry", side_effect=Exception()) - - notification_id = sample_notification.id + if sender_id: + notification["sender_id"] = sender_id + + reply_to = ServiceEmailReplyTo() + reply_to.email_address = "notify@digital.cabinet-office.gov.uk" + mocked_get_sender_id = mocker.patch("app.celery.tasks.dao_get_reply_to_by_id", return_value=reply_to) + mocked_deliver_email = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + + json_template_date = {"data": template_schema.dump(sample_template)} + json_service_data = {"data": service_schema.dump(sample_service)} + mocked_redis_get = mocker.patch.object(redis_store, "get") + + mocked_redis_get.side_effect = [ + bytes(json.dumps(json_service_data, default=lambda o: o.hex if isinstance(o, uuid.UUID) else None), encoding="utf-8"), + bytes( + json.dumps(json_template_date, default=lambda o: o.hex if 
isinstance(o, uuid.UUID) else None), encoding="utf-8" + ), + bytes( + json.dumps(json_template_date, default=lambda o: o.hex if isinstance(o, uuid.UUID) else None), encoding="utf-8" + ), + bytes(json.dumps(json_service_data, default=lambda o: o.hex if isinstance(o, uuid.UUID) else None), encoding="utf-8"), + False, + False, + ] + mocker.patch("app.notifications.process_notifications.choose_queue", return_value="email_normal_queue") + + save_emails(sample_template.service_id, [signer_notification.sign(notification)], uuid.uuid4()) + assert mocked_redis_get.called + persisted_notification = Notification.query.one() + assert persisted_notification.to == "test@unittest.com" + assert persisted_notification.template_id == sample_template.id + assert persisted_notification.template_version == sample_template.version + assert persisted_notification.status == "created" + assert persisted_notification.created_at <= datetime.utcnow() + assert not persisted_notification.sent_at + assert not persisted_notification.sent_by + assert not persisted_notification.job_id + assert persisted_notification.personalisation == {"name": "Jo"} + assert persisted_notification._personalisation == signer_personalisation.sign({"name": "Jo"}) + assert persisted_notification.notification_type == "email" + mocked_deliver_email.assert_called_once_with([str(persisted_notification.id)], queue=QueueNames.SEND_EMAIL_MEDIUM) + if sender_id: + mocked_get_sender_id.assert_called_once_with(persisted_notification.service_id, sender_id) + + def test_save_email_should_save_default_email_reply_to_text_on_notification(self, notify_db_session, mocker): + service = create_service() + create_reply_to_email(service=service, email_address="reply_to@digital.gov.uk", is_default=True) + template = create_template(service=service, template_type="email", subject="Hello") + + notification = _notification_json(template, to="test@example.com") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + + notification_id = uuid.uuid4() + save_emails(service.id, [signer_notification.sign(notification)], notification_id) + + persisted_notification = Notification.query.one() + assert persisted_notification.reply_to_text == "reply_to@digital.gov.uk" + + def test_save_email_should_save_non_default_email_reply_to_text_on_notification_when_set(self, notify_db_session, mocker): + service = create_service() + create_reply_to_email(service=service, email_address="reply_to@digital.gov.uk", is_default=True) + create_reply_to_email(service=service, email_address="reply_two@digital.gov.uk", is_default=False) + template = create_template(service=service, template_type="email", subject="Hello") + + notification = _notification_json(template, to="test@example.com", reply_to_text="reply_two@digital.gov.uk") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + + notification_id = uuid.uuid4() + save_emails(service.id, [signer_notification.sign(notification)], notification_id) + + persisted_notification = Notification.query.one() + assert persisted_notification.reply_to_text == "reply_two@digital.gov.uk" + + def test_should_put_save_email_task_in_research_mode_queue_if_research_mode_service(self, notify_db_session, mocker): + service = create_service(research_mode=True) + + template = create_template(service=service, template_type="email") + + notification = _notification_json(template, to="test@test.com") + + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + + notification_id = uuid.uuid4() + + save_emails(service.id, 
[signer_notification.sign(notification)], notification_id) + + persisted_notification = Notification.query.one() + provider_tasks.deliver_email.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue="research-mode-tasks" + ) - save_email( - sample_notification.service_id, - notification_id, - encryption.encrypt(json), + @pytest.mark.parametrize( + "process_type,expected_queue", [("priority", QueueNames.SEND_EMAIL_HIGH), ("bulk", QueueNames.SEND_EMAIL_LOW)] ) - assert Notification.query.count() == 1 - assert not deliver_email.called - assert not retry.called + def test_should_route_save_email_task_to_appropriate_queue_according_to_template_process_type( + self, notify_db_session, mocker, process_type, expected_queue + ): + service = create_service() + template = create_template(service=service, template_type="email", process_type=process_type) + notification = _notification_json(template, to="test@test.com") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") -def test_save_sms_does_not_send_duplicate_and_does_not_put_in_retry_queue(sample_notification, mocker): - json = _notification_json(sample_notification.template, "6502532222", job_id=uuid.uuid4(), row_number=1) - deliver_sms = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - retry = mocker.patch("app.celery.tasks.save_sms.retry", side_effect=Exception()) + notification_id = uuid.uuid4() - notification_id = sample_notification.id + save_emails(service.id, [signer_notification.sign(notification)], notification_id) - save_sms( - sample_notification.service_id, - notification_id, - encryption.encrypt(json), - ) - assert Notification.query.count() == 1 - assert not deliver_sms.called - assert not retry.called + persisted_notification = Notification.query.one() + provider_tasks.deliver_email.apply_async.assert_called_once_with([str(persisted_notification.id)], queue=expected_queue) + def test_should_route_save_email_task_to_bulk_on_large_csv_file(self, notify_db_session, mocker): + service = create_service() + template = create_template(service=service, template_type="email", process_type="normal") + notification = _notification_json(template, to="test@test.com", queue=QueueNames.SEND_EMAIL_LOW) -def test_save_letter_saves_letter_to_database(mocker, notify_db_session): - service = create_service() - contact_block = create_letter_contact(service=service, contact_block="Address contact", is_default=True) - template = create_template(service=service, template_type=LETTER_TYPE, reply_to=contact_block.id) - job = create_job(template=template) + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - mocker.patch( - "app.celery.tasks.create_random_identifier", - return_value="this-is-random-in-real-life", - ) - mocker.patch("app.celery.tasks.letters_pdf_tasks.create_letters_pdf.apply_async") - mock_over_daily_limit = mocker.patch("app.celery.tasks.check_service_over_daily_message_limit") - - personalisation = { - "addressline1": "Foo", - "addressline2": "Bar", - "addressline3": "Baz", - "addressline4": "Wibble", - "addressline5": "Wobble", - "addressline6": "Wubble", - "postcode": "Flob", - } - notification_json = _notification_json( - template=job.template, - to="Foo", - personalisation=personalisation, - job_id=job.id, - row_number=1, - ) - notification_id = uuid.uuid4() - created_at = datetime.utcnow() + notification_id = uuid.uuid4() - save_letter( - job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) + save_emails(service.id, 
[signer_notification.sign(notification)], notification_id) - notification_db = Notification.query.one() - assert notification_db.id == notification_id - assert notification_db.to == "Foo" - assert notification_db.job_id == job.id - assert notification_db.template_id == job.template.id - assert notification_db.template_version == job.template.version - assert notification_db.status == "created" - assert notification_db.created_at >= created_at - assert notification_db.notification_type == "letter" - assert notification_db.sent_at is None - assert notification_db.sent_by is None - assert notification_db.personalisation == personalisation - assert notification_db.reference == "this-is-random-in-real-life" - assert notification_db.reply_to_text == contact_block.contact_block - - mock_over_daily_limit.assert_called_once_with("normal", service) - - -@pytest.mark.parametrize("postage", ["first", "second"]) -def test_save_letter_saves_letter_to_database_with_correct_postage(mocker, notify_db_session, postage): - service = create_service(service_permissions=[LETTER_TYPE]) - template = create_template(service=service, template_type=LETTER_TYPE, postage=postage) - letter_job = create_job(template=template) - - mocker.patch("app.celery.tasks.letters_pdf_tasks.create_letters_pdf.apply_async") - notification_json = _notification_json( - template=letter_job.template, - to="Foo", - personalisation={ - "addressline1": "Foo", - "addressline2": "Bar", - "postcode": "Flob", - }, - job_id=letter_job.id, - row_number=1, - ) - notification_id = uuid.uuid4() - save_letter( - letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) + persisted_notification = Notification.query.one() + provider_tasks.deliver_email.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue=QueueNames.SEND_EMAIL_LOW + ) - notification_db = Notification.query.one() - assert notification_db.id == notification_id - assert notification_db.postage == postage + def test_should_use_email_template_and_persist( + self, notify_api, sample_email_template_with_placeholders, sample_api_key, mocker + ): + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + now = datetime(2016, 1, 1, 11, 9, 0) + notification_id = uuid.uuid4() -def test_save_letter_saves_letter_to_database_right_reply_to(mocker, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Address contact", is_default=True) - template = create_template(service=service, template_type=LETTER_TYPE, reply_to=None) - job = create_job(template=template) + with freeze_time("2016-01-01 12:00:00.000000"): + notification = _notification_json( + sample_email_template_with_placeholders, + "my_email@my_email.com", + {"name": "Jo"}, + row_number=1, + ) - mocker.patch( - "app.celery.tasks.create_random_identifier", - return_value="this-is-random-in-real-life", - ) - mocker.patch("app.celery.tasks.letters_pdf_tasks.create_letters_pdf.apply_async") - - personalisation = { - "addressline1": "Foo", - "addressline2": "Bar", - "addressline3": "Baz", - "addressline4": "Wibble", - "addressline5": "Wobble", - "addressline6": "Wubble", - "postcode": "Flob", - } - notification_json = _notification_json( - template=job.template, - to="Foo", - personalisation=personalisation, - job_id=job.id, - row_number=1, - ) - notification_id = uuid.uuid4() - created_at = datetime.utcnow() + with freeze_time("2016-01-01 11:10:00.00000"): + save_emails( + sample_email_template_with_placeholders.service_id, 
[signer_notification.sign(notification)], notification_id + ) - save_letter( - job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) + persisted_notification = Notification.query.one() + assert persisted_notification.to == "my_email@my_email.com" + assert persisted_notification.template_id == sample_email_template_with_placeholders.id + assert persisted_notification.template_version == sample_email_template_with_placeholders.version + assert persisted_notification.created_at >= now + assert not persisted_notification.sent_at + assert persisted_notification.status == "created" + assert not persisted_notification.sent_by + assert persisted_notification.job_row_number == 1 + assert persisted_notification.personalisation == {"name": "Jo"} + assert persisted_notification._personalisation == signer_personalisation.sign({"name": "Jo"}) + assert persisted_notification.api_key_id is None + assert persisted_notification.key_type == KEY_TYPE_NORMAL + assert persisted_notification.notification_type == "email" + + provider_tasks.deliver_email.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue=QueueNames.SEND_EMAIL_MEDIUM + ) - notification_db = Notification.query.one() - assert notification_db.id == notification_id - assert notification_db.to == "Foo" - assert notification_db.job_id == job.id - assert notification_db.template_id == job.template.id - assert notification_db.template_version == job.template.version - assert notification_db.status == "created" - assert notification_db.created_at >= created_at - assert notification_db.notification_type == "letter" - assert notification_db.sent_at is None - assert notification_db.sent_by is None - assert notification_db.personalisation == personalisation - assert notification_db.reference == "this-is-random-in-real-life" - assert not notification_db.reply_to_text - - -def test_save_letter_uses_template_reply_to_text(mocker, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Address contact", is_default=True) - template_contact = create_letter_contact(service=service, contact_block="Template address contact", is_default=False) - template = create_template(service=service, template_type=LETTER_TYPE, reply_to=template_contact.id) - - job = create_job(template=template) - - mocker.patch( - "app.celery.tasks.create_random_identifier", - return_value="this-is-random-in-real-life", - ) - mocker.patch("app.celery.tasks.letters_pdf_tasks.create_letters_pdf.apply_async") + def test_save_email_should_use_template_version_from_job_not_latest(self, sample_email_template, mocker): + notification = _notification_json(sample_email_template, "my_email@my_email.com") + version_on_notification = sample_email_template.version + # Change the template + from app.dao.templates_dao import dao_get_template_by_id, dao_update_template + + sample_email_template.content = sample_email_template.content + " another version of the template" + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + dao_update_template(sample_email_template) + t = dao_get_template_by_id(sample_email_template.id) + assert t.version > version_on_notification + now = datetime.utcnow() + + save_emails(sample_email_template.service_id, [signer_notification.sign(notification)], uuid.uuid4()) + + persisted_notification = Notification.query.one() + assert persisted_notification.to == "my_email@my_email.com" + assert persisted_notification.template_id == sample_email_template.id + assert 
persisted_notification.template_version == version_on_notification + assert persisted_notification.created_at >= now + assert not persisted_notification.sent_at + assert persisted_notification.status == "created" + assert not persisted_notification.sent_by + assert persisted_notification.notification_type == "email" + provider_tasks.deliver_email.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue=QueueNames.SEND_EMAIL_MEDIUM + ) - personalisation = { - "addressline1": "Foo", - "addressline2": "Bar", - "postcode": "Flob", - } - notification_json = _notification_json( - template=job.template, - to="Foo", - personalisation=personalisation, - job_id=job.id, - row_number=1, - ) + def test_should_use_email_template_subject_placeholders(self, sample_email_template_with_placeholders, mocker): + notification = _notification_json(sample_email_template_with_placeholders, "my_email@my_email.com", {"name": "Jo"}) + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + + notification_id = uuid.uuid4() + now = datetime.utcnow() + + save_emails(sample_email_template_with_placeholders.service_id, [signer_notification.sign(notification)], notification_id) + + persisted_notification = Notification.query.one() + assert persisted_notification.to == "my_email@my_email.com" + assert persisted_notification.template_id == sample_email_template_with_placeholders.id + assert persisted_notification.status == "created" + assert persisted_notification.created_at >= now + assert not persisted_notification.sent_by + assert persisted_notification.personalisation == {"name": "Jo"} + assert not persisted_notification.reference + assert persisted_notification.notification_type == "email" + provider_tasks.deliver_email.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue=QueueNames.SEND_EMAIL_MEDIUM + ) - save_letter( - job.service_id, - uuid.uuid4(), - encryption.encrypt(notification_json), - ) + def test_save_email_uses_the_reply_to_text_when_provided(self, sample_email_template, mocker): + notification = _notification_json(sample_email_template, "my_email@my_email.com") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - notification_db = Notification.query.one() - assert notification_db.reply_to_text == "Template address contact" + service = sample_email_template.service + notification_id = uuid.uuid4() + service_email_reply_to_dao.add_reply_to_email_address_for_service(service.id, "default@example.com", True) + other_email_reply_to = service_email_reply_to_dao.add_reply_to_email_address_for_service( + service.id, "other@example.com", False + ) + notification["sender_id"] = str(other_email_reply_to.id) -def test_save_sms_uses_sms_sender_reply_to_text(mocker, notify_db_session): - service = create_service_with_defined_sms_sender(sms_sender_value="6502532222") - template = create_template(service=service) + save_emails(sample_email_template.service_id, [signer_notification.sign(notification)], notification_id) - notification = _notification_json(template, to="6502532222") - mocker.patch("app.celery.provider_tasks.deliver_throttled_sms.apply_async") + persisted_notification = Notification.query.one() + assert persisted_notification.notification_type == "email" + assert persisted_notification.reply_to_text == "other@example.com" - notification_id = uuid.uuid4() - save_sms( - service.id, - notification_id, - encryption.encrypt(notification), - ) + def test_save_email_uses_the_default_reply_to_text_if_sender_id_is_none(self, 
sample_email_template, mocker): + notification = _notification_json(sample_email_template, "my_email@my_email.com") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - persisted_notification = Notification.query.one() - assert persisted_notification.reply_to_text == "+16502532222" + service = sample_email_template.service + notification_id = uuid.uuid4() + service_email_reply_to_dao.add_reply_to_email_address_for_service(service.id, "default@example.com", True) + save_emails(sample_email_template.service_id, [signer_notification.sign(notification)], notification_id) -def test_save_sms_uses_non_default_sms_sender_reply_to_text_if_provided(mocker, notify_db_session): - service = create_service_with_defined_sms_sender(sms_sender_value="07123123123") - template = create_template(service=service) - new_sender = service_sms_sender_dao.dao_add_sms_sender_for_service(service.id, "new-sender", False) + persisted_notification = Notification.query.one() + assert persisted_notification.notification_type == "email" + assert persisted_notification.reply_to_text == "default@example.com" - notification = _notification_json(template, to="6502532222") - mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + def test_should_use_email_template_and_persist_without_personalisation(self, sample_email_template, mocker): + notification = _notification_json(sample_email_template, "my_email@my_email.com") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - notification_id = uuid.uuid4() - save_sms( - service.id, - notification_id, - encryption.encrypt(notification), - sender_id=new_sender.id, - ) + notification_id = uuid.uuid4() - persisted_notification = Notification.query.one() - assert persisted_notification.reply_to_text == "new-sender" + now = datetime.utcnow() + save_emails(sample_email_template.service_id, [signer_notification.sign(notification)], notification_id) -@pytest.mark.parametrize("env", ["staging", "live"]) -def test_save_letter_sets_delivered_letters_as_pdf_permission_in_research_mode_in_staging_live( - notify_api, mocker, notify_db_session, sample_letter_job, env -): - sample_letter_job.service.research_mode = True - sample_reference = "this-is-random-in-real-life" - mock_create_fake_letter_response_file = mocker.patch( - "app.celery.research_mode_tasks.create_fake_letter_response_file.apply_async" - ) - mocker.patch("app.celery.tasks.create_random_identifier", return_value=sample_reference) - - personalisation = { - "addressline1": "Foo", - "addressline2": "Bar", - "postcode": "Flob", - } - notification_json = _notification_json( - template=sample_letter_job.template, - to="Foo", - personalisation=personalisation, - job_id=sample_letter_job.id, - row_number=1, - ) - notification_id = uuid.uuid4() - - with set_config_values(notify_api, {"NOTIFY_ENVIRONMENT": env}): - save_letter( - sample_letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), + persisted_notification = Notification.query.one() + assert persisted_notification.to == "my_email@my_email.com" + assert persisted_notification.template_id == sample_email_template.id + assert persisted_notification.created_at >= now + assert not persisted_notification.sent_at + assert persisted_notification.status == "created" + assert not persisted_notification.sent_by + assert not persisted_notification.personalisation + assert not persisted_notification.reference + assert persisted_notification.notification_type == "email" + 
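# A minimal sketch (not part of the patch) of the batched flow these tests exercise,
# assuming this module's fixtures and helpers (sample_email_template, mocker,
# _notification_json, signer_notification, save_emails, provider_tasks, QueueNames):
# each payload is signed, the batch is handed to save_emails, and one deliver_email
# task is queued on the medium-priority email queue.
def _example_batched_save_emails(sample_email_template, mocker):
    mocker.patch("app.celery.provider_tasks.deliver_email.apply_async")
    signed = [signer_notification.sign(_notification_json(sample_email_template, "someone@example.com"))]
    save_emails(sample_email_template.service_id, signed, uuid.uuid4())
    persisted = Notification.query.one()
    provider_tasks.deliver_email.apply_async.assert_called_once_with(
        [str(persisted.id)], queue=QueueNames.SEND_EMAIL_MEDIUM
    )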
provider_tasks.deliver_email.apply_async.assert_called_once_with( + [str(persisted_notification.id)], queue=QueueNames.SEND_EMAIL_MEDIUM ) - notification = Notification.query.filter(Notification.id == notification_id).one() - assert notification.status == "delivered" - assert not mock_create_fake_letter_response_file.called + def test_save_email_should_go_to_retry_queue_if_database_errors(self, sample_email_template, mocker): + notification = _notification_json(sample_email_template, "test@example.gov.uk") + expected_exception = SQLAlchemyError() -@pytest.mark.parametrize("env", ["development", "preview"]) -def test_save_letter_calls_create_fake_response_for_letters_in_research_mode_on_development_preview( - notify_api, mocker, notify_db_session, sample_letter_job, env -): - sample_letter_job.service.research_mode = True - sample_reference = "this-is-random-in-real-life" - mock_create_fake_letter_response_file = mocker.patch( - "app.celery.research_mode_tasks.create_fake_letter_response_file.apply_async" - ) - mocker.patch("app.celery.tasks.create_random_identifier", return_value=sample_reference) - - personalisation = { - "addressline1": "Foo", - "addressline2": "Bar", - "postcode": "Flob", - } - notification_json = _notification_json( - template=sample_letter_job.template, - to="Foo", - personalisation=personalisation, - job_id=sample_letter_job.id, - row_number=1, - ) - notification_id = uuid.uuid4() - - with set_config_values(notify_api, {"NOTIFY_ENVIRONMENT": env}): - save_letter( - sample_letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + mocker.patch("app.celery.tasks.save_emails.retry", side_effect=Retry) + mocker.patch("app.celery.tasks.save_emails.max_retries", return_value=4) + mocker.patch( + "app.notifications.process_notifications.bulk_insert_notifications", + side_effect=expected_exception, ) + notification_id = uuid.uuid4() - mock_create_fake_letter_response_file.assert_called_once_with((sample_reference,), queue=QueueNames.RESEARCH_MODE) + with pytest.raises(Retry): + save_emails(sample_email_template.service_id, [signer_notification.sign(notification)], notification_id) + assert not provider_tasks.deliver_email.apply_async.called + tasks.save_emails.retry.assert_called_with(exc=expected_exception, queue="retry-tasks") -def test_save_letter_calls_create_letters_pdf_task_not_in_research(mocker, notify_db_session, sample_letter_job): - mock_create_letters_pdf = mocker.patch("app.celery.letters_pdf_tasks.create_letters_pdf.apply_async") + assert Notification.query.count() == 0 - personalisation = { - "addressline1": "Foo", - "addressline2": "Bar", - "postcode": "Flob", - } - notification_json = _notification_json( - template=sample_letter_job.template, - to="Foo", - personalisation=personalisation, - job_id=sample_letter_job.id, - row_number=1, - ) - notification_id = uuid.uuid4() - - save_letter( - sample_letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) - - assert mock_create_letters_pdf.called - mock_create_letters_pdf.assert_called_once_with([str(notification_id)], queue=QueueNames.CREATE_LETTERS_PDF) - - -def test_should_cancel_job_if_service_is_inactive(sample_service, sample_job, mocker): - sample_service.active = False - - mocker.patch("app.celery.tasks.s3.get_job_from_s3") - mocker.patch("app.celery.tasks.process_row") - - process_job(sample_job.id) + def 
test_save_email_does_not_send_duplicate_and_does_not_put_in_retry_queue(self, sample_notification, mocker): + json = _notification_json( + sample_notification.template, + sample_notification.to, + job_id=uuid.uuid4(), + row_number=1, + ) + deliver_email = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + retry = mocker.patch("app.celery.tasks.save_emails.retry", side_effect=Exception()) + notification_id = str(sample_notification.id) + json["id"] = str(sample_notification.id) - job = jobs_dao.dao_get_job_by_id(sample_job.id) - assert job.job_status == "cancelled" - s3.get_job_from_s3.assert_not_called() - tasks.process_row.assert_not_called() + save_emails(sample_notification.service_id, [signer_notification.sign(json)], notification_id) + assert Notification.query.count() == 1 + assert not deliver_email.called + assert not retry.called @pytest.mark.parametrize( @@ -1518,459 +1871,453 @@ def test_get_template_class(template_type, expected_class): assert get_template_class(template_type) == expected_class -def test_send_inbound_sms_to_service_post_https_request_to_service(notify_api, sample_service): - inbound_api = create_service_inbound_api( - service=sample_service, - url="https://some.service.gov.uk/", - bearer_token="something_unique", - ) - inbound_sms = create_inbound_sms( - service=sample_service, - notify_number="0751421", - user_number="447700900111", - provider_date=datetime(2017, 6, 20), - content="Here is some content", - ) - data = { - "id": str(inbound_sms.id), - "source_number": inbound_sms.user_number, - "destination_number": inbound_sms.notify_number, - "message": inbound_sms.content, - "date_received": inbound_sms.provider_date.strftime(DATETIME_FORMAT), - } - - with requests_mock.Mocker() as request_mock: - request_mock.post(inbound_api.url, json={}, status_code=200) - send_inbound_sms_to_service(inbound_sms.id, inbound_sms.service_id) - assert request_mock.call_count == 1 - assert request_mock.request_history[0].url == inbound_api.url - assert request_mock.request_history[0].method == "POST" - assert request_mock.request_history[0].text == json.dumps(data) - assert request_mock.request_history[0].headers["Content-type"] == "application/json" - assert request_mock.request_history[0].headers["Authorization"] == "Bearer {}".format(inbound_api.bearer_token) - - -def test_send_inbound_sms_to_service_does_not_send_request_when_inbound_sms_does_not_exist(notify_api, sample_service): - inbound_api = create_service_inbound_api(service=sample_service) - with requests_mock.Mocker() as request_mock: - request_mock.post(inbound_api.url, json={}, status_code=200) - with pytest.raises(SQLAlchemyError): - send_inbound_sms_to_service(inbound_sms_id=uuid.uuid4(), service_id=sample_service.id) - - assert request_mock.call_count == 0 - - -def test_send_inbound_sms_to_service_does_not_sent_request_when_inbound_api_does_not_exist(notify_api, sample_service, mocker): - inbound_sms = create_inbound_sms( - service=sample_service, - notify_number="0751421", - user_number="447700900111", - provider_date=datetime(2017, 6, 20), - content="Here is some content", - ) - mocked = mocker.patch("requests.request") - send_inbound_sms_to_service(inbound_sms.id, inbound_sms.service_id) - - mocked.call_count == 0 - - -def test_send_inbound_sms_to_service_retries_if_request_returns_500(notify_api, sample_service, mocker): - inbound_api = create_service_inbound_api( - service=sample_service, - url="https://some.service.gov.uk/", - bearer_token="something_unique", - ) - inbound_sms = 
create_inbound_sms( - service=sample_service, - notify_number="0751421", - user_number="447700900111", - provider_date=datetime(2017, 6, 20), - content="Here is some content", - ) +class TestSendInboundSmsToService: + def test_send_inbound_sms_to_service_post_https_request_to_service(self, notify_api, sample_service): + inbound_api = create_service_inbound_api( + service=sample_service, + url="https://some.service.gov.uk/", + bearer_token="something_unique", + ) + inbound_sms = create_inbound_sms( + service=sample_service, + notify_number="0751421", + user_number="447700900111", + provider_date=datetime(2017, 6, 20), + content="Here is some content", + ) + data = { + "id": str(inbound_sms.id), + "source_number": inbound_sms.user_number, + "destination_number": inbound_sms.notify_number, + "message": inbound_sms.content, + "date_received": inbound_sms.provider_date.strftime(DATETIME_FORMAT), + } - mocked = mocker.patch("app.celery.tasks.send_inbound_sms_to_service.retry") - with requests_mock.Mocker() as request_mock: - request_mock.post(inbound_api.url, json={}, status_code=500) + with requests_mock.Mocker() as request_mock: + request_mock.post(inbound_api.url, json={}, status_code=200) + send_inbound_sms_to_service(inbound_sms.id, inbound_sms.service_id) + assert request_mock.call_count == 1 + assert request_mock.request_history[0].url == inbound_api.url + assert request_mock.request_history[0].method == "POST" + assert request_mock.request_history[0].text == json.dumps(data) + assert request_mock.request_history[0].headers["Content-type"] == "application/json" + assert request_mock.request_history[0].headers["Authorization"] == "Bearer {}".format(inbound_api.bearer_token) + + def test_send_inbound_sms_to_service_does_not_send_request_when_inbound_sms_does_not_exist(self, notify_api, sample_service): + inbound_api = create_service_inbound_api(service=sample_service) + with requests_mock.Mocker() as request_mock: + request_mock.post(inbound_api.url, json={}, status_code=200) + with pytest.raises(SQLAlchemyError): + send_inbound_sms_to_service(inbound_sms_id=uuid.uuid4(), service_id=sample_service.id) + + assert request_mock.call_count == 0 + + def test_send_inbound_sms_to_service_does_not_sent_request_when_inbound_api_does_not_exist( + self, notify_api, sample_service, mocker + ): + inbound_sms = create_inbound_sms( + service=sample_service, + notify_number="0751421", + user_number="447700900111", + provider_date=datetime(2017, 6, 20), + content="Here is some content", + ) + mocked = mocker.patch("requests.request") send_inbound_sms_to_service(inbound_sms.id, inbound_sms.service_id) - assert mocked.call_count == 1 - assert mocked.call_args[1]["queue"] == "retry-tasks" - + mocked.call_count == 0 -def test_send_inbound_sms_to_service_retries_if_request_throws_unknown(notify_api, sample_service, mocker): - create_service_inbound_api( - service=sample_service, - url="https://some.service.gov.uk/", - bearer_token="something_unique", - ) - inbound_sms = create_inbound_sms( - service=sample_service, - notify_number="0751421", - user_number="447700900111", - provider_date=datetime(2017, 6, 20), - content="Here is some content", - ) - - mocked = mocker.patch("app.celery.tasks.send_inbound_sms_to_service.retry") - mocker.patch("app.celery.tasks.request", side_effect=RequestException()) + def test_send_inbound_sms_to_service_retries_if_request_returns_500(self, notify_api, sample_service, mocker): + inbound_api = create_service_inbound_api( + service=sample_service, + 
url="https://some.service.gov.uk/", + bearer_token="something_unique", + ) + inbound_sms = create_inbound_sms( + service=sample_service, + notify_number="0751421", + user_number="447700900111", + provider_date=datetime(2017, 6, 20), + content="Here is some content", + ) - send_inbound_sms_to_service(inbound_sms.id, inbound_sms.service_id) + mocked = mocker.patch("app.celery.tasks.send_inbound_sms_to_service.retry") + with requests_mock.Mocker() as request_mock: + request_mock.post(inbound_api.url, json={}, status_code=500) + send_inbound_sms_to_service(inbound_sms.id, inbound_sms.service_id) - assert mocked.call_count == 1 - assert mocked.call_args[1]["queue"] == "retry-tasks" + assert mocked.call_count == 1 + assert mocked.call_args[1]["queue"] == "retry-tasks" + def test_send_inbound_sms_to_service_retries_if_request_throws_unknown(self, notify_api, sample_service, mocker): + create_service_inbound_api( + service=sample_service, + url="https://some.service.gov.uk/", + bearer_token="something_unique", + ) + inbound_sms = create_inbound_sms( + service=sample_service, + notify_number="0751421", + user_number="447700900111", + provider_date=datetime(2017, 6, 20), + content="Here is some content", + ) -def test_send_inbound_sms_to_service_does_not_retries_if_request_returns_404(notify_api, sample_service, mocker): - inbound_api = create_service_inbound_api( - service=sample_service, - url="https://some.service.gov.uk/", - bearer_token="something_unique", - ) - inbound_sms = create_inbound_sms( - service=sample_service, - notify_number="0751421", - user_number="447700900111", - provider_date=datetime(2017, 6, 20), - content="Here is some content", - ) + mocked = mocker.patch("app.celery.tasks.send_inbound_sms_to_service.retry") + mocker.patch("app.celery.tasks.request", side_effect=RequestException()) - mocked = mocker.patch("app.celery.tasks.send_inbound_sms_to_service.retry") - with requests_mock.Mocker() as request_mock: - request_mock.post(inbound_api.url, json={}, status_code=404) send_inbound_sms_to_service(inbound_sms.id, inbound_sms.service_id) - mocked.call_count == 0 - - -def test_process_incomplete_job_sms(mocker, sample_template): - - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_sms"), - ) - save_sms = mocker.patch("app.celery.tasks.save_sms.apply_async") - - job = create_job( - template=sample_template, - notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_ERROR, - ) - - save_notification(create_notification(sample_template, job, 0)) - save_notification(create_notification(sample_template, job, 1)) - - assert Notification.query.filter(Notification.job_id == job.id).count() == 2 - - process_incomplete_job(str(job.id)) - - completed_job = Job.query.filter(Job.id == job.id).one() - - assert completed_job.job_status == JOB_STATUS_FINISHED - - assert save_sms.call_count == 8 # There are 10 in the file and we've added two already - - -def test_process_incomplete_job_with_notifications_all_sent(mocker, sample_template): - - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_sms"), - ) - mock_save_sms = mocker.patch("app.celery.tasks.save_sms.apply_async") - - job = create_job( - template=sample_template, - notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - 
timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_ERROR, - ) - - save_notification(create_notification(sample_template, job, 0)) - save_notification(create_notification(sample_template, job, 1)) - save_notification(create_notification(sample_template, job, 2)) - save_notification(create_notification(sample_template, job, 3)) - save_notification(create_notification(sample_template, job, 4)) - save_notification(create_notification(sample_template, job, 5)) - save_notification(create_notification(sample_template, job, 6)) - save_notification(create_notification(sample_template, job, 7)) - save_notification(create_notification(sample_template, job, 8)) - save_notification(create_notification(sample_template, job, 9)) - - assert Notification.query.filter(Notification.job_id == job.id).count() == 10 - - process_incomplete_job(str(job.id)) - - completed_job = Job.query.filter(Job.id == job.id).one() - - assert completed_job.job_status == JOB_STATUS_FINISHED - - assert mock_save_sms.call_count == 0 # There are 10 in the file and we've added 10 it should not have been called - - -def test_process_incomplete_jobs_sms(mocker, sample_template): - - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_sms"), - ) - mock_save_sms = mocker.patch("app.celery.tasks.save_sms.apply_async") - - job = create_job( - template=sample_template, - notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_ERROR, - ) - save_notification(create_notification(sample_template, job, 0)) - save_notification(create_notification(sample_template, job, 1)) - save_notification(create_notification(sample_template, job, 2)) - - assert Notification.query.filter(Notification.job_id == job.id).count() == 3 - - job2 = create_job( - template=sample_template, - notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_ERROR, - ) - - save_notification(create_notification(sample_template, job2, 0)) - save_notification(create_notification(sample_template, job2, 1)) - save_notification(create_notification(sample_template, job2, 2)) - save_notification(create_notification(sample_template, job2, 3)) - save_notification(create_notification(sample_template, job2, 4)) - - assert Notification.query.filter(Notification.job_id == job2.id).count() == 5 - - jobs = [job.id, job2.id] - process_incomplete_jobs(jobs) - - completed_job = Job.query.filter(Job.id == job.id).one() - completed_job2 = Job.query.filter(Job.id == job2.id).one() - - assert completed_job.job_status == JOB_STATUS_FINISHED - - assert completed_job2.job_status == JOB_STATUS_FINISHED - - assert mock_save_sms.call_count == 12 # There are 20 in total over 2 jobs we've added 8 already - - -def test_process_incomplete_jobs_no_notifications_added(mocker, sample_template): - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_sms"), - ) - mock_save_sms = mocker.patch("app.celery.tasks.save_sms.apply_async") - - job = create_job( - template=sample_template, - notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - 
processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_ERROR, - ) + assert mocked.call_count == 1 + assert mocked.call_args[1]["queue"] == "retry-tasks" - assert Notification.query.filter(Notification.job_id == job.id).count() == 0 + def test_send_inbound_sms_to_service_does_not_retries_if_request_returns_404(self, notify_api, sample_service, mocker): + inbound_api = create_service_inbound_api( + service=sample_service, + url="https://some.service.gov.uk/", + bearer_token="something_unique", + ) + inbound_sms = create_inbound_sms( + service=sample_service, + notify_number="0751421", + user_number="447700900111", + provider_date=datetime(2017, 6, 20), + content="Here is some content", + ) - process_incomplete_job(job.id) + mocked = mocker.patch("app.celery.tasks.send_inbound_sms_to_service.retry") + with requests_mock.Mocker() as request_mock: + request_mock.post(inbound_api.url, json={}, status_code=404) + send_inbound_sms_to_service(inbound_sms.id, inbound_sms.service_id) - completed_job = Job.query.filter(Job.id == job.id).one() + mocked.call_count == 0 - assert completed_job.job_status == JOB_STATUS_FINISHED - assert mock_save_sms.call_count == 10 # There are 10 in the csv file +class TestProcessIncompleteJob: + def test_process_incomplete_job_sms(self, mocker, sample_template): + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + save_smss = mocker.patch("app.celery.tasks.save_smss.apply_async") + + job = create_job( + template=sample_template, + notification_count=10, + created_at=datetime.utcnow() - timedelta(hours=2), + scheduled_for=datetime.utcnow() - timedelta(minutes=31), + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_ERROR, + ) + save_notification(create_notification(sample_template, job, 0)) + save_notification(create_notification(sample_template, job, 1)) -def test_process_incomplete_jobs(mocker): + assert Notification.query.filter(Notification.job_id == job.id).count() == 2 - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_sms"), - ) - mock_save_sms = mocker.patch("app.celery.tasks.save_sms.apply_async") + process_incomplete_job(str(job.id)) - jobs = [] - process_incomplete_jobs(jobs) + assert save_smss.call_count == 1 # The save_smss call will be called once + assert len(save_smss.call_args[0][0][1]) == 8 # The unprocessed 8 notifications will be sent to save_smss - assert mock_save_sms.call_count == 0 # There are no jobs to process so it will not have been called + def test_process_incomplete_job_with_notifications_all_sent(self, mocker, sample_template): + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + mock_save_sms = mocker.patch("app.celery.tasks.save_smss.apply_async") + + job = create_job( + template=sample_template, + notification_count=10, + created_at=datetime.utcnow() - timedelta(hours=2), + scheduled_for=datetime.utcnow() - timedelta(minutes=31), + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_ERROR, + ) + save_notification(create_notification(sample_template, job, 0)) + save_notification(create_notification(sample_template, job, 1)) + save_notification(create_notification(sample_template, job, 2)) + save_notification(create_notification(sample_template, job, 3)) + save_notification(create_notification(sample_template, job, 4)) + save_notification(create_notification(sample_template, 
job, 5)) + save_notification(create_notification(sample_template, job, 6)) + save_notification(create_notification(sample_template, job, 7)) + save_notification(create_notification(sample_template, job, 8)) + save_notification(create_notification(sample_template, job, 9)) -def test_process_incomplete_job_no_job_in_database(mocker, fake_uuid): + assert Notification.query.filter(Notification.job_id == job.id).count() == 10 - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_sms"), - ) - mock_save_sms = mocker.patch("app.celery.tasks.save_sms.apply_async") + process_incomplete_job(str(job.id)) - with pytest.raises(expected_exception=Exception): - process_incomplete_job(fake_uuid) + assert mock_save_sms.call_count == 0 # There are 10 in the file and we've added 10 it should not have been called - assert mock_save_sms.call_count == 0 # There is no job in the db it will not have been called + def test_process_incomplete_jobs_sms(self, mocker, sample_template): + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + mock_save_smss = mocker.patch("app.celery.tasks.save_smss.apply_async") + + job = create_job( + template=sample_template, + notification_count=10, + created_at=datetime.utcnow() - timedelta(hours=2), + scheduled_for=datetime.utcnow() - timedelta(minutes=31), + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_ERROR, + ) + save_notification(create_notification(sample_template, job, 0)) + save_notification(create_notification(sample_template, job, 1)) + save_notification(create_notification(sample_template, job, 2)) + + assert Notification.query.filter(Notification.job_id == job.id).count() == 3 + + job2 = create_job( + template=sample_template, + notification_count=10, + created_at=datetime.utcnow() - timedelta(hours=2), + scheduled_for=datetime.utcnow() - timedelta(minutes=31), + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_ERROR, + ) + save_notification(create_notification(sample_template, job2, 0)) + save_notification(create_notification(sample_template, job2, 1)) + save_notification(create_notification(sample_template, job2, 2)) + save_notification(create_notification(sample_template, job2, 3)) + save_notification(create_notification(sample_template, job2, 4)) -def test_process_incomplete_job_email(mocker, sample_email_template): + assert Notification.query.filter(Notification.job_id == job2.id).count() == 5 - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_email"), - ) - mock_email_saver = mocker.patch("app.celery.tasks.save_email.apply_async") - - job = create_job( - template=sample_email_template, - notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_ERROR, - ) + jobs = [job.id, job2.id] + process_incomplete_jobs(jobs) - save_notification(create_notification(sample_email_template, job, 0)) - save_notification(create_notification(sample_email_template, job, 1)) + assert mock_save_smss.call_count == 2 + # The second time the job is called we will send 5 notifications through + assert len(mock_save_smss.call_args[0][0][1]) == 5 - assert Notification.query.filter(Notification.job_id == job.id).count() == 2 + def test_process_incomplete_jobs_no_notifications_added(self, mocker, 
sample_template): + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + mock_save_sms = mocker.patch("app.celery.tasks.save_smss.apply_async") + + job = create_job( + template=sample_template, + notification_count=10, + created_at=datetime.utcnow() - timedelta(hours=2), + scheduled_for=datetime.utcnow() - timedelta(minutes=31), + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_ERROR, + ) - process_incomplete_job(str(job.id)) + assert Notification.query.filter(Notification.job_id == job.id).count() == 0 - completed_job = Job.query.filter(Job.id == job.id).one() + process_incomplete_job(job.id) - assert completed_job.job_status == JOB_STATUS_FINISHED + assert mock_save_sms.call_count == 1 + assert len(mock_save_sms.call_args[0][0][1]) == 10 # There are 10 in the csv file - assert mock_email_saver.call_count == 8 # There are 10 in the file and we've added two already + def test_process_incomplete_jobs(self, mocker): + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + mock_save_sms = mocker.patch("app.celery.tasks.save_smss.apply_async") + jobs = [] + process_incomplete_jobs(jobs) -def test_process_incomplete_job_letter(mocker, sample_letter_template): - mocker.patch( - "app.celery.tasks.s3.get_job_from_s3", - return_value=load_example_csv("multiple_letter"), - ) - mock_letter_saver = mocker.patch("app.celery.tasks.save_letter.apply_async") - - job = create_job( - template=sample_letter_template, - notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_ERROR, - ) + assert mock_save_sms.call_count == 0 # There are no jobs to process so it will not have been called - save_notification(create_notification(sample_letter_template, job, 0)) - save_notification(create_notification(sample_letter_template, job, 1)) + def test_process_incomplete_job_no_job_in_database(self, mocker, fake_uuid): + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_sms"), + ) + mock_save_sms = mocker.patch("app.celery.tasks.save_smss.apply_async") - assert Notification.query.filter(Notification.job_id == job.id).count() == 2 + with pytest.raises(expected_exception=Exception): + process_incomplete_job(fake_uuid) - process_incomplete_job(str(job.id)) + assert mock_save_sms.call_count == 0 # There is no job in the db it will not have been called - assert mock_letter_saver.call_count == 8 + def test_process_incomplete_job_email(self, mocker, sample_email_template): + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_email"), + ) + mock_email_saver = mocker.patch("app.celery.tasks.save_emails.apply_async") + + job = create_job( + template=sample_email_template, + notification_count=10, + created_at=datetime.utcnow() - timedelta(hours=2), + scheduled_for=datetime.utcnow() - timedelta(minutes=31), + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_ERROR, + ) + save_notification(create_notification(sample_email_template, job, 0)) + save_notification(create_notification(sample_email_template, job, 1)) -@freeze_time("2017-01-01") -def test_process_incomplete_jobs_sets_status_to_in_progress_and_resets_processing_started_time(mocker, sample_template): - mock_process_incomplete_job = 
mocker.patch("app.celery.tasks.process_incomplete_job") + assert Notification.query.filter(Notification.job_id == job.id).count() == 2 - job1 = create_job( - sample_template, - processing_started=datetime.utcnow() - timedelta(minutes=30), - job_status=JOB_STATUS_ERROR, - ) - job2 = create_job( - sample_template, - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_ERROR, - ) + process_incomplete_job(str(job.id)) - process_incomplete_jobs([str(job1.id), str(job2.id)]) + assert mock_email_saver.call_count == 1 + assert len(mock_email_saver.call_args[0][0][1]) == 8 # There are 10 in the file and we've added two already - assert job1.job_status == JOB_STATUS_IN_PROGRESS - assert job1.processing_started == datetime.utcnow() + @pytest.mark.skip(reason="DEPRECATED: letter code") + def test_process_incomplete_job_letter(self, mocker, sample_letter_template): + mocker.patch( + "app.celery.tasks.s3.get_job_from_s3", + return_value=load_example_csv("multiple_letter"), + ) + mock_letter_saver = mocker.patch("app.celery.tasks.save_letter.apply_async") + + job = create_job( + template=sample_letter_template, + notification_count=10, + created_at=datetime.utcnow() - timedelta(hours=2), + scheduled_for=datetime.utcnow() - timedelta(minutes=31), + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_ERROR, + ) - assert job2.job_status == JOB_STATUS_IN_PROGRESS - assert job2.processing_started == datetime.utcnow() + save_notification(create_notification(sample_letter_template, job, 0)) + save_notification(create_notification(sample_letter_template, job, 1)) - assert mock_process_incomplete_job.mock_calls == [ - call(str(job1.id)), - call(str(job2.id)), - ] + assert Notification.query.filter(Notification.job_id == job.id).count() == 2 + process_incomplete_job(str(job.id)) -def test_process_returned_letters_list(sample_letter_template): - save_notification(create_notification(sample_letter_template, reference="ref1")) - save_notification(create_notification(sample_letter_template, reference="ref2")) + assert mock_letter_saver.call_count == 8 - process_returned_letters_list(["ref1", "ref2", "unknown-ref"]) + @freeze_time("2017-01-01") + def test_process_incomplete_jobs_does_not_change_status_and_resets_processing_started_time(self, mocker, sample_template): + mock_process_incomplete_job = mocker.patch("app.celery.tasks.process_incomplete_job") - notifications = Notification.query.all() + job1 = create_job( + sample_template, + processing_started=datetime.utcnow() - timedelta(minutes=30), + job_status=JOB_STATUS_ERROR, + ) + job2 = create_job( + sample_template, + processing_started=datetime.utcnow() - timedelta(minutes=31), + job_status=JOB_STATUS_ERROR, + ) - assert [n.status for n in notifications] == ["returned-letter", "returned-letter"] - assert all(n.updated_at for n in notifications) + process_incomplete_jobs([str(job1.id), str(job2.id)]) + assert job1.job_status == JOB_STATUS_ERROR + assert job1.processing_started == datetime.utcnow() -def test_process_returned_letters_list_updates_history_if_notification_is_already_purged( - sample_letter_template, -): - create_notification_history(sample_letter_template, reference="ref1") - create_notification_history(sample_letter_template, reference="ref2") + assert job2.job_status == JOB_STATUS_ERROR + assert job2.processing_started == datetime.utcnow() - process_returned_letters_list(["ref1", "ref2", "unknown-ref"]) + assert mock_process_incomplete_job.mock_calls == [ + call(str(job1.id)), + 
call(str(job2.id)), + ] - notifications = NotificationHistory.query.all() - assert [n.status for n in notifications] == ["returned-letter", "returned-letter"] - assert all(n.updated_at for n in notifications) +class TestSendNotifyNoReply: + def test_send_notify_no_reply(self, mocker, sample_notification, no_reply_template): + persist_mock = mocker.patch("app.celery.tasks.persist_notifications", return_value=[sample_notification]) + queue_mock = mocker.patch("app.celery.tasks.send_notification_to_queue") + data = json.dumps( + { + "sender": "sender@example.com", + "recipients": ["service@notify.ca"], + } + ) -def test_send_notify_no_reply(mocker, no_reply_template): - persist_mock = mocker.patch("app.celery.tasks.persist_notification") - queue_mock = mocker.patch("app.celery.tasks.send_notification_to_queue") + send_notify_no_reply(data) - data = json.dumps( - { - "sender": "sender@example.com", - "recipients": ["service@notify.ca"], + assert len(persist_mock.call_args_list) == 1 + persist_call = persist_mock.call_args_list[0][0][0][0] + assert persist_call["recipient"] == "sender@example.com" + assert persist_call["personalisation"] == { + "sending_email_address": "service@notify.ca", } - ) - - send_notify_no_reply(data) - - assert len(persist_mock.call_args_list) == 1 - persist_call = persist_mock.call_args_list[0][1] - - assert persist_call["recipient"] == "sender@example.com" - assert persist_call["personalisation"] == { - "sending_email_address": "service@notify.ca", - } - assert persist_call["reply_to_text"] is None - - assert len(queue_mock.call_args_list) == 1 - queue_call = queue_mock.call_args_list[0][1] - - assert queue_call["queue"] == QueueNames.NOTIFY - - -def test_send_notify_no_reply_retry(mocker, no_reply_template): - mocker.patch("app.celery.tasks.send_notify_no_reply.retry", side_effect=Retry) - mocker.patch("app.celery.tasks.send_notification_to_queue", side_effect=Exception()) - - with pytest.raises(Retry): - send_notify_no_reply( - json.dumps( - { - "sender": "sender@example.com", - "recipients": ["service@notify.ca"], - } + assert persist_call["reply_to_text"] is None + assert len(queue_mock.call_args_list) == 1 + queue_call = queue_mock.call_args_list[0][1] + + assert queue_call["queue"] == QueueNames.NOTIFY + + def test_send_notify_no_reply_retry(self, mocker, no_reply_template): + mocker.patch("app.celery.tasks.send_notify_no_reply.retry", side_effect=Retry) + mocker.patch("app.celery.tasks.send_notification_to_queue", side_effect=Exception()) + + with pytest.raises(Retry): + send_notify_no_reply( + json.dumps( + { + "sender": "sender@example.com", + "recipients": ["service@notify.ca"], + } + ) ) + + tasks.send_notify_no_reply.retry.assert_called_with(queue=QueueNames.RETRY) + + +class TestSeedBounceRateData: + def test_seed_bounce_rate_data(self, mocker, notify_api): + now = datetime.utcnow() + current_hour = datetime(now.year, now.month, now.day, now.hour, 0) + current_hour_minus_1 = current_hour - timedelta(hours=1) + current_hour_minus_2 = current_hour - timedelta(hours=2) + mocker.patch( + "app.celery.tasks.total_notifications_grouped_by_hour", + return_value=[ + (current_hour_minus_2, 2), + (current_hour_minus_1, 3), + (current_hour, 5), + ], + ) + mocker.patch( + "app.celery.tasks.total_hard_bounces_grouped_by_hour", + return_value=[(current_hour_minus_2, 1), (current_hour, 1)], + ) + mocker.patch("app.celery.tasks.statsd_client.timing_with_dates") + mocker.patch("app.celery.tasks.bounce_rate_client.get_seeding_started", return_value=False) + service_id = 
"6ce466d0-fd6a-11e5-82f5-e0accb9d11a6" + + with notify_api.app_context(): + seed_bounce_rate_in_redis(service_id) + assert bounce_rate_client.get_bounce_rate(service_id) == 2 / 10.0 + + def test_seed_bounce_rate_data_isnt_called(self, mocker, notify_api): + hour_15 = datetime(2023, 4, 18, 15, 0) + hour_16 = datetime(2023, 4, 18, 16, 0) + hour_17 = datetime(2023, 4, 18, 17, 0) + mocker.patch( + "app.celery.tasks.total_notifications_grouped_by_hour", + return_value=[ + (hour_15, 2), + (hour_16, 3), + (hour_17, 5), + ], ) + mocker.patch( + "app.celery.tasks.total_hard_bounces_grouped_by_hour", + return_value=[(hour_15, 1), (hour_16, 1)], + ) + mocker.patch("app.celery.tasks.statsd_client.timing_with_dates") + mocked_set_seeded_total_notifications = mocker.patch("app.celery.tasks.bounce_rate_client.set_notifications_seeded") + mocked_set_seeded_hard_bounces = mocker.patch("app.celery.tasks.bounce_rate_client.set_hard_bounce_seeded") + mocker.patch("app.celery.tasks.bounce_rate_client.get_seeding_started", return_value=True) + + with notify_api.app_context(): + seed_bounce_rate_in_redis("6ce466d0-fd6a-11e5-82f5-e0accb9d11a6") - tasks.send_notify_no_reply.retry.assert_called_with(queue=QueueNames.RETRY) + mocked_set_seeded_total_notifications.assert_not_called() + mocked_set_seeded_hard_bounces.assert_not_called() diff --git a/tests/app/clients/test_aws_pinpoint.py b/tests/app/clients/test_aws_pinpoint.py new file mode 100644 index 0000000000..b913b1c39b --- /dev/null +++ b/tests/app/clients/test_aws_pinpoint.py @@ -0,0 +1,188 @@ +import pytest + +from app import aws_pinpoint_client +from app.clients.sms import SmsSendingVehicles +from tests.conftest import set_config_values + + +@pytest.mark.serial +@pytest.mark.parametrize("template_id", [None, "uuid"]) +def test_send_sms_sends_to_default_pool(notify_api, mocker, sample_template, template_id): + boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True) + mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True) + to = "6135555555" + content = "foo" + reference = "ref" + + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [], + }, + ): + aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=template_id) + + boto_mock.send_text_message.assert_called_once_with( + DestinationPhoneNumber="+16135555555", + OriginationIdentity="default_pool_id", + MessageBody=content, + MessageType="TRANSACTIONAL", + ConfigurationSetName="config_set_name", + ) + + +@pytest.mark.serial +def test_send_sms_sends_sc_template_to_shortcode_pool_with_ff_false(notify_api, mocker, sample_template): + boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True) + mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True) + to = "6135555555" + content = "foo" + reference = "ref" + + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sample_template.id)], + "FF_TEMPLATE_CATEGORY": False, + }, + ): + aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id) + + boto_mock.send_text_message.assert_called_once_with( + DestinationPhoneNumber="+16135555555", + OriginationIdentity="sc_pool_id", + 
MessageBody=content, + MessageType="TRANSACTIONAL", + ConfigurationSetName="config_set_name", + ) + + +@pytest.mark.serial +def test_send_sms_sends_notify_sms_to_shortcode_pool(notify_api, mocker, sample_template): + boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True) + mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True) + to = "6135555555" + content = "foo" + reference = "ref" + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sample_template.id)], + "NOTIFY_SERVICE_ID": "notify", + }, + ): + aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id, service_id="notify") + + boto_mock.send_text_message.assert_called_once_with( + DestinationPhoneNumber="+16135555555", + OriginationIdentity="sc_pool_id", + MessageBody=content, + MessageType="TRANSACTIONAL", + ConfigurationSetName="config_set_name", + ) + + +def test_send_sms_returns_raises_error_if_there_is_no_valid_number_is_found(notify_api, mocker): + mocker.patch.object(aws_pinpoint_client, "_client", create=True) + mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True) + + to = "" + content = reference = "foo" + + with pytest.raises(ValueError) as excinfo: + aws_pinpoint_client.send_sms(to, content, reference) + + assert "No valid numbers found for SMS delivery" in str(excinfo.value) + + +def test_handles_opted_out_numbers(notify_api, mocker, sample_template): + conflict_error = aws_pinpoint_client._client.exceptions.ConflictException( + error_response={"Reason": "DESTINATION_PHONE_NUMBER_OPTED_OUT"}, operation_name="send_text_message" + ) + mocker.patch("app.aws_pinpoint_client._client.send_text_message", side_effect=conflict_error) + + to = "6135555555" + content = "foo" + reference = "ref" + assert aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id) == "opted_out" + + +@pytest.mark.serial +@pytest.mark.parametrize( + "FF_TEMPLATE_CATEGORY, sending_vehicle, expected_pool", + [ + (False, None, "default_pool_id"), + (False, "long_code", "default_pool_id"), + (False, "short_code", "default_pool_id"), + (True, None, "default_pool_id"), + (True, "long_code", "default_pool_id"), + (True, "short_code", "sc_pool_id"), + ], +) +def test_respects_sending_vehicle_if_FF_enabled( + notify_api, mocker, sample_template, FF_TEMPLATE_CATEGORY, sending_vehicle, expected_pool +): + boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True) + mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True) + to = "6135555555" + content = "foo" + reference = "ref" + sms_sending_vehicle = None if sending_vehicle is None else SmsSendingVehicles(sending_vehicle) + + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [], + "FF_TEMPLATE_CATEGORY": FF_TEMPLATE_CATEGORY, + }, + ): + aws_pinpoint_client.send_sms( + to, content, reference=reference, template_id=sample_template.id, sending_vehicle=sms_sending_vehicle + ) + + boto_mock.send_text_message.assert_called_once_with( + DestinationPhoneNumber="+16135555555", + OriginationIdentity=expected_pool, + MessageBody=content, + MessageType="TRANSACTIONAL", + 
ConfigurationSetName="config_set_name", + ) + + +@pytest.mark.serial +def test_send_sms_sends_international_without_pool_id(notify_api, mocker, sample_template): + boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True) + mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True) + to = "+447512501324" + content = "foo" + reference = "ref" + + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [], + }, + ): + aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id) + + boto_mock.send_text_message.assert_called_once_with( + DestinationPhoneNumber="+447512501324", + MessageBody=content, + MessageType="TRANSACTIONAL", + ConfigurationSetName="config_set_name", + ) diff --git a/tests/app/clients/test_aws_ses.py b/tests/app/clients/test_aws_ses.py index d984c7ff90..c33e72e826 100644 --- a/tests/app/clients/test_aws_ses.py +++ b/tests/app/clients/test_aws_ses.py @@ -197,7 +197,6 @@ def test_send_email_raises_bad_email_as_InvalidEmailError(mocker): ) assert "some error message from amazon" in str(excinfo.value) - assert "definitely@invalid_email.com" in str(excinfo.value) def test_send_email_raises_other_errs_as_AwsSesClientException(mocker): diff --git a/tests/app/clients/test_freshdesk.py b/tests/app/clients/test_freshdesk.py index afa588b93c..8ccbdae256 100644 --- a/tests/app/clients/test_freshdesk.py +++ b/tests/app/clients/test_freshdesk.py @@ -1,173 +1,332 @@ import base64 from typing import Any, Dict +import pytest import requests_mock from flask import Flask +from requests import RequestException -from app.clients.freshdesk import Freshdesk +from app.clients import freshdesk from app.user.contact_request import ContactRequest +from tests.conftest import set_config_values -def test_send_ticket_go_live_request(notify_api: Flask): - def match_json(request): - expected = { - "product_id": 42, - "subject": "Support Request", - "description": "t6 just requested to go live.

" - "- Department/org: department_org_name
" - "- Intended recipients: internal, external, public
" - "- Purpose: main_use_case
" - "- Notification types: email, sms
" - "- Expected monthly volume: 100k+
" - "---
" - "http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92", - "email": "test@email.com", - "priority": 1, - "status": 2, - "tags": [], - } - - encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii") - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - "https://freshdesk-test.com/api/v2/tickets", - additional_matcher=match_json, - status_code=201, - ) - data: Dict[str, Any] = { - "email_address": "test@email.com", - "name": "name", - "department_org_name": "department_org_name", - "intended_recipients": "internal, external, public", - "main_use_case": "main_use_case", - "friendly_support_type": "Support Request", - "support_type": "go_live_request", - "service_name": "t6", - "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92", - "service_url": "http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92", - "notification_types": "email, sms", - "expected_volume": "100k+", - } - with notify_api.app_context(): - response = Freshdesk(ContactRequest(**data)).send_ticket() - assert response == 201 - - -def test_send_ticket_branding_request(notify_api: Flask): - def match_json(request): - expected = { - "product_id": 42, - "subject": "Branding request", - "description": "A new logo has been uploaded by name (test@email.com) for the following service:
" - "- Service id: 8624bd36-b70b-4d4b-a459-13e1f4770b92
" - "- Service name: t6
" - "- Logo filename: branding_url
" - "

" - "Un nouveau logo a été téléchargé par name (test@email.com) pour le service suivant :
" - "- Identifiant du service : 8624bd36-b70b-4d4b-a459-13e1f4770b92
" - "- Nom du service : t6
" - "- Nom du fichier du logo : branding_url", - "email": "test@email.com", - "priority": 1, - "status": 2, - "tags": [], - } - - encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii") - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - "https://freshdesk-test.com/api/v2/tickets", - additional_matcher=match_json, - status_code=201, - ) - data: Dict[str, Any] = { - "email_address": "test@email.com", - "name": "name", - "friendly_support_type": "Branding request", - "support_type": "branding_request", - "service_name": "t6", - "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92", - "branding_url": "branding_url", - } - with notify_api.app_context(): - response = Freshdesk(ContactRequest(**data)).send_ticket() - assert response == 201 - - -def test_send_ticket_other(notify_api: Flask): - def match_json(request): - expected = { - "product_id": 42, - "subject": "Support Request", - "description": "", - "email": "test@email.com", - "priority": 1, - "status": 2, - "tags": [], - } - - encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii") - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - "https://freshdesk-test.com/api/v2/tickets", - additional_matcher=match_json, - status_code=201, - ) - - with notify_api.app_context(): - response = Freshdesk(ContactRequest(email_address="test@email.com")).send_ticket() - assert response == 201 - - -def test_send_ticket_user_profile(notify_api: Flask): - def match_json(request): - expected = { - "product_id": 42, - "subject": "Support Request", - "description": "

---

user_profile", - "email": "test@email.com", - "priority": 1, - "status": 2, - "tags": [], - } - - encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii") - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - "https://freshdesk-test.com/api/v2/tickets", - additional_matcher=match_json, - status_code=201, - ) - - with notify_api.app_context(): - response = Freshdesk( - ContactRequest( - email_address="test@email.com", - user_profile="user_profile", - ) - ).send_ticket() - assert response == 201 +class TestSendTicket: + @pytest.fixture() + def email_freshdesk_ticket_mock(self, mocker): + temp = freshdesk.Freshdesk.email_freshdesk_ticket + freshdesk.Freshdesk.email_freshdesk_ticket = mocker.Mock() + yield freshdesk.Freshdesk.email_freshdesk_ticket + freshdesk.Freshdesk.email_freshdesk_ticket = temp + + def test_send_ticket_demo(self, email_freshdesk_ticket_mock, notify_api: Flask): + def match_json(request): + expected = { + "product_id": 42, + "subject": "friendly-support-type-test", + "description": "- user: name-test test@email.com

" + "- department/org: dept-test

" + "- program/service: service-test

" + "- intended recipients: internal

" + "- main use case: main-use-case-test

" + "- main use case details: main-use-case-details-test", + "email": "test@email.com", + "priority": 1, + "status": 2, + "tags": [], + } + + encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii") + json_matches = request.json() == expected + basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" + + return json_matches and basic_auth_header + + with requests_mock.mock() as rmock: + rmock.request( + "POST", + "https://freshdesk-test.com/api/v2/tickets", + additional_matcher=match_json, + status_code=201, + ) + + contact_request: Dict[str, Any] = { + "email_address": "test@email.com", + "name": "name-test", + "department_org_name": "dept-test", + "program_service_name": "service-test", + "intended_recipients": "internal", + "main_use_case": "main-use-case-test", + "main_use_case_details": "main-use-case-details-test", + "friendly_support_type": "friendly-support-type-test", + "language": "en", + "support_type": "demo", + } + + with notify_api.app_context(): + response = freshdesk.Freshdesk(ContactRequest(**contact_request)).send_ticket() + assert response == 201 + assert email_freshdesk_ticket_mock.not_called() + + def test_send_ticket_go_live_request(self, email_freshdesk_ticket_mock, notify_api: Flask): + def match_json(request): + expected = { + "product_id": 42, + "subject": "Support Request", + "description": "t6 just requested to go live.

" + "- Department/org: department_org_name
" + "- Intended recipients: internal, external, public
" + "- Purpose: main_use_case
" + "- Notification types: email, sms
" + "- Expected monthly volume: 100k+
" + "---
" + "http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92", + "email": "test@email.com", + "priority": 1, + "status": 2, + "tags": [], + } + + encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii") + json_matches = request.json() == expected + basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" + + return json_matches and basic_auth_header + + with requests_mock.mock() as rmock: + rmock.request( + "POST", + "https://freshdesk-test.com/api/v2/tickets", + additional_matcher=match_json, + status_code=201, + ) + data: Dict[str, Any] = { + "email_address": "test@email.com", + "name": "name", + "department_org_name": "department_org_name", + "intended_recipients": "internal, external, public", + "main_use_case": "main_use_case", + "friendly_support_type": "Support Request", + "support_type": "go_live_request", + "service_name": "t6", + "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92", + "service_url": "http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92", + "notification_types": "email, sms", + "expected_volume": "100k+", + } + with notify_api.app_context(): + response = freshdesk.Freshdesk(ContactRequest(**data)).send_ticket() + assert response == 201 + assert email_freshdesk_ticket_mock.not_called() + + def test_send_ticket_branding_request(self, email_freshdesk_ticket_mock, notify_api: Flask): + def match_json(request): + expected = { + "product_id": 42, + "subject": "Branding request", + "description": "A new logo has been uploaded by name (test@email.com) for the following service:
" + "- Service id: 8624bd36-b70b-4d4b-a459-13e1f4770b92
" + "- Service name: t6
" + "- Organisation id: 6b72e84f-8591-42e1-93b8-7d24a45e1d79
" + "- Organisation name: best org name ever
" + "- Logo filename: branding_url
" + "- Logo name: branding_logo_name
" + "- Alt text english: en alt text
" + "- Alt text french: fr alt text
" + "

" + "Un nouveau logo a été téléchargé par name (test@email.com) pour le service suivant :
" + "- Identifiant du service : 8624bd36-b70b-4d4b-a459-13e1f4770b92
" + "- Nom du service : t6
" + "- Identifiant de l'organisation: 6b72e84f-8591-42e1-93b8-7d24a45e1d79
" + "- Nom de l'organisation: best org name ever
" + "- Nom du fichier du logo : branding_url
" + "- Nom du logo : branding_logo_name
" + "- Texte alternatif anglais : en alt text
" + "- Texte alternatif français : fr alt text", + "email": "test@email.com", + "priority": 1, + "status": 2, + "tags": [], + } + + encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii") + json_matches = request.json() == expected + basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" + + return json_matches and basic_auth_header + + with requests_mock.mock() as rmock: + rmock.request( + "POST", + "https://freshdesk-test.com/api/v2/tickets", + additional_matcher=match_json, + status_code=201, + ) + data: Dict[str, Any] = { + "email_address": "test@email.com", + "name": "name", + "friendly_support_type": "Branding request", + "support_type": "branding_request", + "service_name": "t6", + "organisation_id": "6b72e84f-8591-42e1-93b8-7d24a45e1d79", + "department_org_name": "best org name ever", + "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92", + "branding_url": "branding_url", + "branding_logo_name": "branding_logo_name", + "alt_text_en": "en alt text", + "alt_text_fr": "fr alt text", + } + with notify_api.app_context(): + response = freshdesk.Freshdesk(ContactRequest(**data)).send_ticket() + assert response == 201 + assert email_freshdesk_ticket_mock.not_called() + + def test_send_ticket_other_category(self, email_freshdesk_ticket_mock, notify_api: Flask): + def match_json(request): + expected = { + "product_id": 42, + "subject": "New template category request", + "description": "New template category request from name (test@email.com):
" + "- Service id: 8624bd36-b70b-4d4b-a459-13e1f4770b92
" + "- New Template Category Request name: test category name
" + "- Template id request: http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92/templates/3ed1f07a-1b20-4f83-9a3e-158ab9b00103
" + "

" + "Demande de nouvelle catégorie de modèle de name (test@email.com):
" + "- Identifiant du service: 8624bd36-b70b-4d4b-a459-13e1f4770b92
" + "- Nom de la nouvelle catégorie de modèle demandée: test category name
" + "- Demande d'identifiant de modèle: http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92/templates/3ed1f07a-1b20-4f83-9a3e-158ab9b00103", + "email": "test@email.com", + "priority": 1, + "status": 2, + "tags": [], + } + + encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii") + json_matches = request.json() == expected + basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" + + return json_matches and basic_auth_header + + with requests_mock.mock() as rmock: + rmock.request( + "POST", + "https://freshdesk-test.com/api/v2/tickets", + additional_matcher=match_json, + status_code=201, + ) + data: Dict[str, Any] = { + "email_address": "test@email.com", + "name": "name", + "friendly_support_type": "New template category request", + "support_type": "new_template_category_request", + "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92", + "template_category_name_en": "test category name", + "template_category_name_fr": "test category name", + "template_id_link": "http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92/templates/3ed1f07a-1b20-4f83-9a3e-158ab9b00103", + } + with notify_api.app_context(): + response = freshdesk.Freshdesk(ContactRequest(**data)).send_ticket() + assert response == 201 + assert email_freshdesk_ticket_mock.not_called() + + def test_send_ticket_other(self, email_freshdesk_ticket_mock, notify_api: Flask): + def match_json(request): + expected = { + "product_id": 42, + "subject": "Support Request", + "description": "", + "email": "test@email.com", + "priority": 1, + "status": 2, + "tags": [], + } + + encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii") + json_matches = request.json() == expected + basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" + + return json_matches and basic_auth_header + + with requests_mock.mock() as rmock: + rmock.request( + "POST", + "https://freshdesk-test.com/api/v2/tickets", + additional_matcher=match_json, + status_code=201, + ) + + with notify_api.app_context(): + response = freshdesk.Freshdesk(ContactRequest(email_address="test@email.com")).send_ticket() + assert response == 201 + assert email_freshdesk_ticket_mock.not_called() + + def test_send_ticket_user_profile(self, email_freshdesk_ticket_mock, notify_api: Flask): + def match_json(request): + expected = { + "product_id": 42, + "subject": "Support Request", + "description": "
+    def test_send_ticket_user_profile(self, email_freshdesk_ticket_mock, notify_api: Flask):
+        def match_json(request):
+            expected = {
+                "product_id": 42,
+                "subject": "Support Request",
+                "description": "\n\n---\n\nuser_profile",
+                "email": "test@email.com",
+                "priority": 1,
+                "status": 2,
+                "tags": [],
+            }
+
+            encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii")
+            json_matches = request.json() == expected
+            basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}"
+
+            return json_matches and basic_auth_header
+
+        with requests_mock.mock() as rmock:
+            rmock.request(
+                "POST",
+                "https://freshdesk-test.com/api/v2/tickets",
+                additional_matcher=match_json,
+                status_code=201,
+            )
+
+            with notify_api.app_context():
+                response = freshdesk.Freshdesk(
+                    ContactRequest(
+                        email_address="test@email.com",
+                        user_profile="user_profile",
+                    )
+                ).send_ticket()
+                assert response == 201
+                assert email_freshdesk_ticket_mock.not_called()
+
+    def test_send_ticket_freshdesk_integration_disabled(self, mocker, email_freshdesk_ticket_mock, notify_api: Flask):
+        mocked_post = mocker.patch("requests.post")
+        with set_config_values(notify_api, {"FRESH_DESK_ENABLED": False}):
+            with notify_api.app_context():
+                response = freshdesk.Freshdesk(ContactRequest(email_address="test@email.com")).send_ticket()
+                mocked_post.assert_not_called()
+                email_freshdesk_ticket_mock.assert_not_called()
+                assert response == 201
+
+    def test_send_ticket_freshdesk_integration_broken(self, email_freshdesk_ticket_mock, mocker, notify_api: Flask):
+        mocked_post = mocker.patch("requests.post", side_effect=RequestException)
+
+        with set_config_values(notify_api, {"FRESH_DESK_ENABLED": True, "FRESH_DESK_API_KEY": "x"}):
+            with notify_api.app_context():
+                response = freshdesk.Freshdesk(ContactRequest(email_address="test@email.com")).send_ticket()
+                mocked_post.assert_called_once()
+                email_freshdesk_ticket_mock.assert_called_once()
+                assert response == 201
+
+
+class TestEmailFreshdesk:
+    def test_email_freshdesk_ticket(self, mocker, notify_api: Flask, contact_form_email_template):
+        mock_persist_notification = mocker.Mock()
+        mock_send_notification_to_queue = mocker.Mock()
+        freshdesk.persist_notification = mock_persist_notification
+        freshdesk.send_notification_to_queue = mock_send_notification_to_queue
+
+        with set_config_values(notify_api, {"CONTACT_FORM_EMAIL_ADDRESS": "contact@test.com"}):
+            with notify_api.app_context():
+                freshdesk_object = freshdesk.Freshdesk(ContactRequest(email_address="test@email.com"))
+                content = {"data": "data"}
+                freshdesk_object.email_freshdesk_ticket(content)
+                mock_persist_notification.assert_called_once()
+                mock_send_notification_to_queue.assert_called_once()
diff --git a/tests/app/clients/test_salesforce_account.py b/tests/app/clients/test_salesforce_account.py
new file mode 100644
index 0000000000..2ada7b8b3b
--- /dev/null
+++ b/tests/app/clients/test_salesforce_account.py
@@ -0,0 +1,45 @@
+from app.clients.salesforce import salesforce_account
+from app.clients.salesforce.salesforce_account import (
+    ORG_NOTES_ORG_NAME_INDEX,
+    ORG_NOTES_OTHER_NAME_INDEX,
+    get_account_id_from_name,
+    get_org_name_from_notes,
+)
+
+
+def test_get_org_name_from_notes():
+    assert get_org_name_from_notes("Account Name 1 > Service Name", ORG_NOTES_ORG_NAME_INDEX) == "Account Name 1"
+    assert get_org_name_from_notes("Account Name 2 > Another service Name") == "Account Name 2"
+    assert get_org_name_from_notes("Account Name 3 > Some service", ORG_NOTES_OTHER_NAME_INDEX) == "Some service"
+    assert get_org_name_from_notes("Account Name 4 > Service Name > Team Name", 2) == "Team Name"
+    assert get_org_name_from_notes(None) is None
+    assert get_org_name_from_notes(">") == ""
+
+
+def 
test_get_account_id_from_name(mocker, notify_api): + mock_session = mocker.MagicMock() + mock_query_one = mocker.patch.object(salesforce_account, "query_one", return_value={"Id": "account_id"}) + with notify_api.app_context(): + assert get_account_id_from_name(mock_session, "Account Name", "generic_account_id") == "account_id" + mock_query_one.assert_called_with( + mock_session, + "SELECT Id FROM Account where Name = 'Account Name' OR CDS_AccountNameFrench__c = 'Account Name' LIMIT 1", + ) + + +def test_get_account_id_from_name_generic(mocker, notify_api): + mock_session = mocker.MagicMock() + mock_query_one = mocker.patch.object(salesforce_account, "query_one", return_value=None) + with notify_api.app_context(): + assert get_account_id_from_name(mock_session, "l'account", "generic_account_id") == "generic_account_id" + mock_query_one.assert_called_with( + mock_session, "SELECT Id FROM Account where Name = 'l\\'account' OR CDS_AccountNameFrench__c = 'l\\'account' LIMIT 1" + ) + + +def test_get_account_id_from_name_blank(mocker, notify_api): + mock_session = mocker.MagicMock() + with notify_api.app_context(): + assert get_account_id_from_name(mock_session, None, "generic_account_id") == "generic_account_id" + assert get_account_id_from_name(mock_session, "", "generic_account_id") == "generic_account_id" + assert get_account_id_from_name(mock_session, " ", "generic_account_id") == "generic_account_id" diff --git a/tests/app/clients/test_salesforce_auth.py b/tests/app/clients/test_salesforce_auth.py new file mode 100644 index 0000000000..2f8acaf46a --- /dev/null +++ b/tests/app/clients/test_salesforce_auth.py @@ -0,0 +1,48 @@ +from unittest.mock import call + +from simple_salesforce.exceptions import SalesforceAuthenticationFailed + +from app.clients.salesforce import salesforce_auth +from app.clients.salesforce.salesforce_auth import end_session, get_session + + +def test_get_session(mocker, notify_api): + with notify_api.app_context(): + mock_salesforce = mocker.patch.object(salesforce_auth, "Salesforce", return_value="session") + mock_timeout_adapter = mocker.patch.object(salesforce_auth, "TimeoutAdapter", return_value="timeout_adapter") + mock_requests = mocker.patch.object(salesforce_auth, "requests") + mock_requests.Session.return_value = mocker.MagicMock() + assert get_session("client_id", "username", "password", "security_token", "domain") == mock_salesforce.return_value + mock_salesforce.assert_called_with( + client_id="client_id", + username="username", + password="password", + security_token="security_token", + domain="domain", + session=mock_requests.Session.return_value, + ) + mock_requests.Session.return_value.mount.assert_has_calls( + [call("https://", mock_timeout_adapter.return_value), call("http://", mock_timeout_adapter.return_value)] + ) + + +def test_get_session_auth_failure(mocker, notify_api): + with notify_api.app_context(): + mocker.patch.object(salesforce_auth, "Salesforce", side_effect=SalesforceAuthenticationFailed("aw", "dang")) + assert get_session("client_id", "username", "password", "security_token", "domain") is None + + +def test_end_session(mocker, notify_api): + mock_session = mocker.MagicMock() + mock_session.session_id = "session_id" + with notify_api.app_context(): + end_session(mock_session) + mock_session.oauth2.assert_called_with("revoke", {"token": mock_session.session_id}, method="POST") + + +def test_end_session_no_session(mocker, notify_api): + mock_session = mocker.MagicMock() + mock_session.session_id = None + with notify_api.app_context(): + 
end_session(mock_session) + mock_session.oauth2.assert_not_called() diff --git a/tests/app/clients/test_salesforce_client.py b/tests/app/clients/test_salesforce_client.py new file mode 100644 index 0000000000..9358493bb4 --- /dev/null +++ b/tests/app/clients/test_salesforce_client.py @@ -0,0 +1,208 @@ +import pytest + +from app.clients.salesforce import ( + salesforce_account, + salesforce_auth, + salesforce_contact, + salesforce_engagement, +) +from app.clients.salesforce.salesforce_client import SalesforceClient +from app.models import User + + +@pytest.fixture(scope="function") +def salesforce_client(client, mocker): + client = SalesforceClient() + current_app = mocker.Mock( + config={ + "SALESFORCE_CLIENT_ID": "Notify", + "SALESFORCE_USERNAME": "someusername", + "SALESFORCE_PASSWORD": "somepassword", + "SALESFORCE_SECURITY_TOKEN": "somesecuritytoken", + "SALESFORCE_DOMAIN": "test", + "SALESFORCE_GENERIC_ACCOUNT_ID": "someaccountid", + } + ) + client.init_app(current_app) + return client + + +def test_get_session(mocker, salesforce_client): + mock_get_session = mocker.patch.object(salesforce_auth, "get_session", return_value="session") + assert salesforce_client.get_session() == mock_get_session.return_value + mock_get_session.assert_called_once_with("Notify", "someusername", "somepassword", "somesecuritytoken", "test") + + +def test_end_session(mocker, salesforce_client): + mock_end_session = mocker.patch.object(salesforce_auth, "end_session", return_value="session") + salesforce_client.end_session("session") + mock_end_session.assert_called_once_with("session") + + +def test_contact_create(mocker, salesforce_client): + mock_get_session = mocker.patch.object(salesforce_client, "get_session", return_value="session") + mock_create = mocker.patch.object(salesforce_contact, "create") + mock_end_session = mocker.patch.object(salesforce_client, "end_session") + + salesforce_client.contact_create("user") + + mock_get_session.assert_called_once() + mock_create.assert_called_once_with("session", "user", {}) + mock_end_session.assert_called_once_with("session") + + +def test_contact_update(mocker, salesforce_client): + mock_get_session = mocker.patch.object(salesforce_client, "get_session", return_value="session") + mock_update = mocker.patch.object(salesforce_contact, "update") + mock_end_session = mocker.patch.object(salesforce_client, "end_session") + mock_user = User( + **{ + "id": 2, + "name": "Samwise Gamgee", + "email_address": "samwise@fellowship.ca", + "platform_admin": False, + } + ) + + salesforce_client.contact_update(mock_user) + + mock_get_session.assert_called_once() + mock_update.assert_called_once_with( + "session", + mock_user, + { + "FirstName": "Samwise", + "LastName": "Gamgee", + "Email": "samwise@fellowship.ca", + }, + ) + mock_end_session.assert_called_once_with("session") + + +def test_contact_update_account_id(mocker, salesforce_client): + mock_get_account_name_from_org = mocker.patch.object( + salesforce_account, "get_org_name_from_notes", return_value="account_name" + ) + mock_get_account_id_from_name = mocker.patch.object(salesforce_account, "get_account_id_from_name", return_value="account_id") + mock_update = mocker.patch.object(salesforce_contact, "update", return_value="contact_id") + mock_session = mocker.MagicMock() + mock_service = mocker.MagicMock() + mock_service.organisation_notes = "account_name > service_name" + + salesforce_client.contact_update_account_id(mock_session, mock_service, "user") + + 
mock_get_account_name_from_org.assert_called_once_with(mock_service.organisation_notes) + mock_get_account_id_from_name.assert_called_once_with(mock_session, "account_name", "someaccountid") + mock_update.assert_called_once_with(mock_session, "user", {"AccountId": "account_id"}) + + +def test_engagement_create(mocker, salesforce_client): + mock_get_session = mocker.patch.object(salesforce_client, "get_session", return_value="session") + mock_contact_update_account_id = mocker.patch.object( + salesforce_client, "contact_update_account_id", return_value=("account_id", "contact_id") + ) + mock_create = mocker.patch.object(salesforce_engagement, "create") + mock_end_session = mocker.patch.object(salesforce_client, "end_session") + mock_service = mocker.MagicMock() + mock_service.organisation_notes = "account_name > service_name" + + salesforce_client.engagement_create(mock_service, "user") + + mock_get_session.assert_called_once() + mock_contact_update_account_id.assert_called_once_with("session", mock_service, "user") + mock_create.assert_called_once_with("session", mock_service, {}, "account_id", "contact_id") + mock_end_session.assert_called_once_with("session") + + +def test_engagement_update(mocker, salesforce_client): + mock_get_session = mocker.patch.object(salesforce_client, "get_session", return_value="session") + mock_contact_update_account_id = mocker.patch.object( + salesforce_client, "contact_update_account_id", return_value=("account_id", "contact_id") + ) + mock_update = mocker.patch.object(salesforce_engagement, "update") + mock_end_session = mocker.patch.object(salesforce_client, "end_session") + mock_service = mocker.MagicMock() + mock_service.organisation_notes = "account_name > service_name" + + salesforce_client.engagement_update(mock_service, "user", {"StageName": "live", "Description": "would be oh so nice"}) + + mock_get_session.assert_called_once() + mock_contact_update_account_id.assert_called_once_with("session", mock_service, "user") + mock_update.assert_called_once_with( + "session", mock_service, {"StageName": "live", "Description": "would be oh so nice"}, "account_id", "contact_id" + ) + mock_end_session.assert_called_once_with("session") + + +def test_engagement_close(mocker, salesforce_client): + mock_get_session = mocker.patch.object(salesforce_client, "get_session", return_value="session") + mock_get_engagement_by_service_id = mocker.patch.object( + salesforce_engagement, "get_engagement_by_service_id", return_value="engagement_id" + ) + mock_update = mocker.patch.object(salesforce_engagement, "update") + mock_end_session = mocker.patch.object(salesforce_client, "end_session") + mock_service = mocker.MagicMock() + mock_service.id = "service_id" + + salesforce_client.engagement_close(mock_service) + + mock_get_session.assert_called_once() + mock_get_engagement_by_service_id.assert_called_once_with("session", mock_service.id) + mock_update.assert_called_once_with( + "session", mock_service, {"CDS_Close_Reason__c": "Service deleted by user", "StageName": "Closed"}, None, None + ) + mock_end_session.assert_called_once_with("session") + + +def test_engagement_close_no_engagement(mocker, salesforce_client): + mock_get_session = mocker.patch.object(salesforce_client, "get_session", return_value="session") + mock_get_engagement_by_service_id = mocker.patch.object( + salesforce_engagement, "get_engagement_by_service_id", return_value=None + ) + mock_update = mocker.patch.object(salesforce_engagement, "update") + mock_end_session = 
mocker.patch.object(salesforce_client, "end_session") + mock_service = mocker.MagicMock() + mock_service.id = "service_id" + + salesforce_client.engagement_close(mock_service) + + mock_get_session.assert_called_once() + mock_get_engagement_by_service_id.assert_called_once_with("session", mock_service.id) + mock_update.assert_not_called() + mock_end_session.assert_called_once_with("session") + + +def test_engagement_add_contact_role(mocker, salesforce_client): + mock_get_session = mocker.patch.object(salesforce_client, "get_session", return_value="session") + mock_contact_update_account_id = mocker.patch.object( + salesforce_client, "contact_update_account_id", return_value=("account_id", "contact_id") + ) + mock_contact_role_add = mocker.patch.object(salesforce_engagement, "contact_role_add") + mock_end_session = mocker.patch.object(salesforce_client, "end_session") + mock_service = mocker.MagicMock() + mock_service.organisation_notes = "account_name > service_name" + + salesforce_client.engagement_add_contact_role(mock_service, "user") + + mock_get_session.assert_called_once() + mock_contact_update_account_id.assert_called_once_with("session", mock_service, "user") + mock_contact_role_add.assert_called_once_with("session", mock_service, "account_id", "contact_id") + mock_end_session.assert_called_once_with("session") + + +def test_engagement_delete_contact_role(mocker, salesforce_client): + mock_get_session = mocker.patch.object(salesforce_client, "get_session", return_value="session") + mock_contact_update_account_id = mocker.patch.object( + salesforce_client, "contact_update_account_id", return_value=("account_id", "contact_id") + ) + mock_contact_role_delete = mocker.patch.object(salesforce_engagement, "contact_role_delete") + mock_end_session = mocker.patch.object(salesforce_client, "end_session") + mock_service = mocker.MagicMock() + mock_service.organisation_notes = "account_name > service_name" + + salesforce_client.engagement_delete_contact_role(mock_service, "user") + + mock_get_session.assert_called_once() + mock_contact_update_account_id.assert_called_once_with("session", mock_service, "user") + mock_contact_role_delete.assert_called_once_with("session", mock_service, "account_id", "contact_id") + mock_end_session.assert_called_once_with("session") diff --git a/tests/app/clients/test_salesforce_contact.py b/tests/app/clients/test_salesforce_contact.py new file mode 100644 index 0000000000..513140be8c --- /dev/null +++ b/tests/app/clients/test_salesforce_contact.py @@ -0,0 +1,141 @@ +import pytest + +from app.clients.salesforce import salesforce_contact +from app.clients.salesforce.salesforce_contact import ( + create, + get_contact_by_user_id, + update, +) +from app.models import User + + +@pytest.fixture +def user(): + return User( + **{ + "id": 2, + "name": "Samwise Gamgee", + "email_address": "samwise@fellowship.ca", + "platform_admin": False, + } + ) + + +def test_create(mocker, notify_api, user): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_session.Contact.create.return_value = {"success": True, "id": "42"} + assert create(mock_session, user, {}) == "42" + mock_session.Contact.create.assert_called_with( + { + "FirstName": "Samwise", + "LastName": "Gamgee", + "Title": "created by Notify API", + "CDS_Contact_ID__c": "2", + "Email": "samwise@fellowship.ca", + }, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + + +def test_create_custom(mocker, notify_api, user): + with notify_api.app_context(): + mock_session = 
mocker.MagicMock() + mock_session.Contact.create.return_value = {"success": True, "id": "42"} + assert create(mock_session, user, {"AccountId": "Samwise", "Lambas": "Bread"}) == "42" + mock_session.Contact.create.assert_called_with( + { + "FirstName": "Samwise", + "LastName": "Gamgee", + "Title": "created by Notify API", + "CDS_Contact_ID__c": "2", + "Email": "samwise@fellowship.ca", + "AccountId": "Samwise", + "Lambas": "Bread", + }, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + + +def test_create_one_name(mocker, notify_api): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_session.Contact.create.return_value = {"success": True, "id": "42"} + mock_user = User( + **{ + "id": 3, + "name": "Gandalf", + "email_address": "gandalf@fellowship.ca", + "platform_admin": False, + } + ) + assert create(mock_session, mock_user, {}) == "42" + mock_session.Contact.create.assert_called_with( + { + "FirstName": "", + "LastName": "Gandalf", + "Title": "created by Notify API", + "CDS_Contact_ID__c": "3", + "Email": "gandalf@fellowship.ca", + }, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + + +def test_create_failed(mocker, notify_api, user): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_session.Contact.create.return_value = {"success": False} + assert create(mock_session, user, {}) is None + + +def test_create_exception(mocker, notify_api, user): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_session.Contact.create.side_effect = Exception() + assert create(mock_session, user, {}) is None + + +def test_update_account_id_existing(mocker, notify_api, user): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_get_contact_by_user_id = mocker.patch.object(salesforce_contact, "get_contact_by_user_id", return_value={"Id": "42"}) + mock_session.Contact.update.return_value = {"success": True, "Id": "42"} + + assert update(mock_session, user, {"AccountId": "potatoes", "Foo": "Bar"}) == "42" + + mock_session.Contact.update.assert_called_with( + "42", {"AccountId": "potatoes", "Foo": "Bar"}, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"} + ) + mock_get_contact_by_user_id.assert_called_with(mock_session, "2") + + +def test_update_account_id_new(mocker, notify_api, user): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_get_contact_by_user_id = mocker.patch.object(salesforce_contact, "get_contact_by_user_id", return_value=None) + mock_create = mocker.patch.object(salesforce_contact, "create", return_value="42") + + assert update(mock_session, user, {"AccountId": "potatoes", "Bam": "Baz"}) == "42" + + mock_get_contact_by_user_id.assert_called_with(mock_session, "2") + mock_create.assert_called_with(mock_session, user, {"AccountId": "potatoes", "Bam": "Baz"}) + + +def test_get_contact_by_user_id(mocker, notify_api): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_query_one = mocker.patch.object(salesforce_contact, "query_one", return_value={"Id": "42"}) + + assert get_contact_by_user_id(mock_session, "2") == {"Id": "42"} + mock_query_one.assert_called_with( + mock_session, "SELECT Id, FirstName, LastName, AccountId FROM Contact WHERE CDS_Contact_ID__c = '2' LIMIT 1" + ) + + +def test_get_contact_by_user_id_blank(mocker, notify_api): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + assert get_contact_by_user_id(mock_session, None) is None + assert get_contact_by_user_id(mock_session, 
"") is None + assert get_contact_by_user_id(mock_session, " ") is None diff --git a/tests/app/clients/test_salesforce_engagement.py b/tests/app/clients/test_salesforce_engagement.py new file mode 100644 index 0000000000..2da44f82db --- /dev/null +++ b/tests/app/clients/test_salesforce_engagement.py @@ -0,0 +1,277 @@ +import pytest + +from app.clients.salesforce import salesforce_engagement +from app.clients.salesforce.salesforce_engagement import ( + contact_role_add, + contact_role_delete, + create, + engagement_maxlengths, + get_engagement_by_service_id, + get_engagement_contact_role, + update, +) +from app.models import Service + + +@pytest.fixture +def service(): + return Service( + **{ + "id": 3, + "name": "The Fellowship", + } + ) + + +def test_create(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_session.Opportunity.create.return_value = {"success": True, "id": "9"} + mock_datetime = mocker.patch.object(salesforce_engagement, "datetime") + mock_datetime.today.return_value.strftime.return_value = "1970-01-01" + notify_api.config["SALESFORCE_ENGAGEMENT_RECORD_TYPE"] = "hobbitsis" + notify_api.config["SALESFORCE_ENGAGEMENT_STANDARD_PRICEBOOK_ID"] = "the ring" + notify_api.config["SALESFORCE_ENGAGEMENT_PRODUCT_ID"] = "my precious" + + assert create(mock_session, service, {}, "123", "456") == "9" + + mock_session.Opportunity.create.assert_called_with( + { + "Name": "The Fellowship", + "AccountId": "123", + "ContactId": "456", + "CDS_Opportunity_Number__c": "3", + "Notify_Organization_Other__c": None, + "CloseDate": "1970-01-01", + "RecordTypeId": "hobbitsis", + "StageName": salesforce_engagement.ENGAGEMENT_STAGE_TRIAL, + "Type": salesforce_engagement.ENGAGEMENT_TYPE, + "CDS_Lead_Team__c": salesforce_engagement.ENGAGEMENT_TEAM, + "Product_to_Add__c": salesforce_engagement.ENGAGEMENT_PRODUCT, + }, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + + mock_session.OpportunityLineItem.create.assert_called_with( + { + "OpportunityId": "9", + "PricebookEntryId": "the ring", + "Product2Id": "my precious", + "Quantity": 1, + "UnitPrice": 0, + }, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + + +def test_create_custom_fields(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_session.Opportunity.create.return_value = {"success": True, "id": "9"} + mock_datetime = mocker.patch.object(salesforce_engagement, "datetime") + mock_datetime.today.return_value.strftime.return_value = "1970-01-01" + notify_api.config["SALESFORCE_ENGAGEMENT_RECORD_TYPE"] = "hobbitsis" + notify_api.config["SALESFORCE_ENGAGEMENT_STANDARD_PRICEBOOK_ID"] = "the ring" + notify_api.config["SALESFORCE_ENGAGEMENT_PRODUCT_ID"] = "my precious" + + assert create(mock_session, service, {"StageName": "lambdas", "NewField": "Muffins"}, "123", "456") == "9" + + mock_session.Opportunity.create.assert_called_with( + { + "Name": "The Fellowship", + "AccountId": "123", + "ContactId": "456", + "CDS_Opportunity_Number__c": "3", + "Notify_Organization_Other__c": None, + "CloseDate": "1970-01-01", + "RecordTypeId": "hobbitsis", + "StageName": "lambdas", + "Type": salesforce_engagement.ENGAGEMENT_TYPE, + "CDS_Lead_Team__c": salesforce_engagement.ENGAGEMENT_TEAM, + "Product_to_Add__c": salesforce_engagement.ENGAGEMENT_PRODUCT, + "NewField": "Muffins", + }, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + + mock_session.OpportunityLineItem.create.assert_called_with( + { + 
"OpportunityId": "9", + "PricebookEntryId": "the ring", + "Product2Id": "my precious", + "Quantity": 1, + "UnitPrice": 0, + }, + headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"}, + ) + + +def test_create_no_engagement_id(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_session.Opportunity.create.return_value = {"success": False} + assert create(mock_session, service, {}, "123", "456") is None + mock_session.Opportunity.create.assert_called_once() + mock_session.OpportunityLineItem.create.assert_not_called() + + +def test_create_no_engagement(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + assert create(mock_session, service, {}, None, None) is None + mock_session.Opportunity.create.assert_not_called() + mock_session.OpportunityLineItem.create.assert_not_called() + + +def test_update_stage_existing(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_get_engagement_by_service_id = mocker.patch.object( + salesforce_engagement, "get_engagement_by_service_id", return_value={"Id": "42"} + ) + mock_session.Opportunity.update.return_value = {"success": True, "Id": "42"} + + assert update(mock_session, service, {"StageName": "potatoes", "Method": "bake em"}, None, None) == "42" + + mock_session.Opportunity.update.assert_called_with( + "42", {"StageName": "potatoes", "Method": "bake em"}, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"} + ) + mock_get_engagement_by_service_id.assert_called_with(mock_session, "3") + + +def test_update_stage_new(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_get_engagement_by_service_id = mocker.patch.object( + salesforce_engagement, "get_engagement_by_service_id", return_value=None + ) + mock_create = mocker.patch.object(salesforce_engagement, "create", return_value="42") + + assert update(mock_session, service, {"StageName": "potatoes"}, "account_id", "contact_id") == "42" + + mock_get_engagement_by_service_id.assert_called_with(mock_session, "3") + mock_create.assert_called_with(mock_session, service, {"StageName": "potatoes"}, "account_id", "contact_id") + + +def test_update_stage_failed(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mocker.patch.object(salesforce_engagement, "get_engagement_by_service_id", return_value={"Id": "42"}) + mock_session.Opportunity.update.return_value = {"success": False} + assert update(mock_session, service, {"StageName": "potatoes"}, "account_id", "contact_id") is None + + +def test_contact_role_add(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_get_engagement_by_service_id = mocker.patch.object( + salesforce_engagement, "get_engagement_by_service_id", return_value={"Id": "42"} + ) + mock_session.OpportunityContactRole.create.return_value = {"success": True, "Id": "42"} + + assert contact_role_add(mock_session, service, "1", "2") is None + mock_session.OpportunityContactRole.create.assert_called_with( + {"ContactId": "2", "OpportunityId": "42"}, headers={"Sforce-Duplicate-Rule-Header": "allowSave=true"} + ) + mock_get_engagement_by_service_id.assert_called_with(mock_session, "3") + + +def test_contact_role_add_create_engagement(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_get_engagement_by_service_id = mocker.patch.object( 
+ salesforce_engagement, "get_engagement_by_service_id", return_value=None + ) + mock_create = mocker.patch.object(salesforce_engagement, "create", return_value=None) + mock_session.OpportunityContactRole.create.return_value = {"success": True, "Id": "42"} + + assert contact_role_add(mock_session, service, "1", "2") is None + mock_session.OpportunityContactRole.create.assert_not_called() + mock_get_engagement_by_service_id.assert_called_with(mock_session, "3") + mock_create.assert_called_with(mock_session, service, {}, "1", "2") + + +def test_contact_role_delete(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_get_engagement_by_service_id = mocker.patch.object( + salesforce_engagement, "get_engagement_by_service_id", return_value={"Id": "42"} + ) + mock_get_engagement_contact_role = mocker.patch.object( + salesforce_engagement, "get_engagement_contact_role", return_value={"Id": "1024"} + ) + mock_session.OpportunityContactRole.delete.return_value = {"success": True} + + assert contact_role_delete(mock_session, service, "1", "2") is None + mock_session.OpportunityContactRole.delete.assert_called_with("1024") + mock_get_engagement_by_service_id.assert_called_with(mock_session, "3") + mock_get_engagement_contact_role.assert_called_with(mock_session, "42", "2") + + +def test_contact_role_delete_no_contact_role(mocker, notify_api, service): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_get_engagement_by_service_id = mocker.patch.object( + salesforce_engagement, "get_engagement_by_service_id", return_value={"Id": "42"} + ) + mock_get_engagement_contact_role = mocker.patch.object( + salesforce_engagement, "get_engagement_contact_role", return_value=None + ) + + assert contact_role_delete(mock_session, service, "1", "2") is None + mock_session.OpportunityContactRole.delete.assert_not_called() + mock_get_engagement_by_service_id.assert_called_with(mock_session, "3") + mock_get_engagement_contact_role.assert_called_with(mock_session, "42", "2") + + +def test_get_engagement_by_service_id(mocker, notify_api): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_query_one = mocker.patch.object(salesforce_engagement, "query_one", return_value={"Id": "42"}) + + assert get_engagement_by_service_id(mock_session, "2") == {"Id": "42"} + mock_query_one.assert_called_with( + mock_session, "SELECT Id, Name, ContactId, AccountId FROM Opportunity where CDS_Opportunity_Number__c = '2' LIMIT 1" + ) + + +def test_get_engagement_by_service_id_blank(mocker, notify_api): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + assert get_engagement_by_service_id(mock_session, None) is None + assert get_engagement_by_service_id(mock_session, "") is None + assert get_engagement_by_service_id(mock_session, " ") is None + + +def test_get_engagement_contact_role(mocker, notify_api): + with notify_api.app_context(): + mock_session = mocker.MagicMock() + mock_query_one = mocker.patch.object( + salesforce_engagement, "query_one", return_value={"Id": "42", "OpportunityId": "1", "ContactId": "2"} + ) + + assert get_engagement_contact_role(mock_session, "1", "2") == {"Id": "42", "OpportunityId": "1", "ContactId": "2"} + mock_query_one.assert_called_with( + mock_session, + "SELECT Id, OpportunityId, ContactId FROM OpportunityContactRole WHERE OpportunityId = '1' AND ContactId = '2' LIMIT 1", + ) + + +def test_get_engagement_contact_role_blank(mocker, notify_api): + with notify_api.app_context(): + 
mock_session = mocker.MagicMock() + assert get_engagement_contact_role(mock_session, None, None) is None + assert get_engagement_contact_role(mock_session, "", "") is None + assert get_engagement_contact_role(mock_session, " ", " ") is None + assert get_engagement_contact_role(mock_session, "1", None) is None + assert get_engagement_contact_role(mock_session, "", "2") is None + assert get_engagement_contact_role(mock_session, "3", " ") is None + + +def test_engagement_maxlengths(): + assert engagement_maxlengths({"foo": "bar"}) == {"foo": "bar"} + assert engagement_maxlengths({"foo": "bar", "bam": "baz"}) == {"foo": "bar", "bam": "baz"} + assert engagement_maxlengths({"Name": "this name is short enough"}) == {"Name": "this name is short enough"} + assert engagement_maxlengths({"Name": f"this name is not short enough {150 * 'x'}"}) == { + "Name": f"this name is not short enough {90 * 'x'}" + } diff --git a/tests/app/clients/test_salesforce_utils.py b/tests/app/clients/test_salesforce_utils.py new file mode 100644 index 0000000000..5e01e72ebe --- /dev/null +++ b/tests/app/clients/test_salesforce_utils.py @@ -0,0 +1,45 @@ +from app.clients.salesforce.salesforce_utils import ( + get_name_parts, + parse_result, + query_one, + query_param_sanitize, +) + + +def test_get_name_parts(): + assert get_name_parts("Frodo Baggins") == {"first": "Frodo", "last": "Baggins"} + assert get_name_parts("Smaug") == {"first": "", "last": "Smaug"} + assert get_name_parts("") == {"first": "", "last": ""} + assert get_name_parts("Gandalf The Grey") == {"first": "Gandalf", "last": "The Grey"} + + +def test_query_one_result(mocker): + mock_session = mocker.MagicMock() + mock_session.query.return_value = {"totalSize": 1, "records": [{"id": "123"}]} + assert query_one(mock_session, "some query") == {"id": "123"} + mock_session.query.assert_called_once_with("some query") + + +def test_query_one_no_results(mocker, notify_api): + mock_session = mocker.MagicMock() + with notify_api.app_context(): + mock_session.query.side_effect = [{"totalSize": 2}, {}] + assert query_one(mock_session, "some query") is None + assert query_one(mock_session, "some query") is None + + +def test_query_param_sanitize(): + assert query_param_sanitize("some string") == "some string" + assert query_param_sanitize("fancy'ish apostrophe's") == "fancy\\'ish apostrophe\\'s" + + +def test_parse_result(notify_api): + with notify_api.app_context(): + assert parse_result(200, "int") is True + assert parse_result(299, "int") is True + assert parse_result(100, "int") is False + assert parse_result(400, "int") is False + assert parse_result(500, "int") is False + assert parse_result({"success": True}, "dict") is True + assert parse_result({"success": False}, "dict") is False + assert parse_result({}, "dict") is False diff --git a/tests/app/clients/test_zendesk.py b/tests/app/clients/test_zendesk.py deleted file mode 100644 index 3e853caf88..0000000000 --- a/tests/app/clients/test_zendesk.py +++ /dev/null @@ -1,187 +0,0 @@ -import base64 -from typing import Any, Dict - -import pytest -import requests_mock -from flask import Flask -from requests import HTTPError - -from app.clients.zendesk import Zendesk -from app.user.contact_request import ContactRequest - - -def test_send_ticket_go_live_request(notify_api: Flask): - def match_json(request): - expected = { - "ticket": { - "subject": "Support Request", - "description": "t6 just requested to go live.

" - "- Department/org: department_org_name
" - "- Intended recipients: internal, external, public
" - "- Purpose: main_use_case
" - "- Notification types: email, sms
" - "- Expected monthly volume: 100k+
" - "---
" - "http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92", - "email": "test@email.com", - "tags": ["notification_api"], - } - } - - encoded_auth = base64.b64encode(b"test@email.com/token:zendesk-api-key").decode("ascii") - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - "https://zendesk-test.com/api/v2/tickets", - additional_matcher=match_json, - status_code=201, - ) - data: Dict[str, Any] = { - "email_address": "test@email.com", - "name": "name", - "department_org_name": "department_org_name", - "intended_recipients": "internal, external, public", - "main_use_case": "main_use_case", - "friendly_support_type": "Support Request", - "support_type": "go_live_request", - "service_name": "t6", - "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92", - "service_url": "http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92", - "notification_types": "email, sms", - "expected_volume": "100k+", - } - with notify_api.app_context(): - Zendesk(ContactRequest(**data)).send_ticket() - - -def test_send_ticket_branding_request(notify_api: Flask): - def match_json(request): - expected = { - "ticket": { - "subject": "Branding request", - "description": "A new logo has been uploaded by name (test@email.com) for the following service:
" - "- Service id: 8624bd36-b70b-4d4b-a459-13e1f4770b92
" - "- Service name: t6
" - "- Logo filename: branding_url
" - "

" - "Un nouveau logo a été téléchargé par name (test@email.com) pour le service suivant :
" - "- Identifiant du service : 8624bd36-b70b-4d4b-a459-13e1f4770b92
" - "- Nom du service : t6
" - "- Nom du fichier du logo : branding_url", - "email": "test@email.com", - "tags": ["notification_api"], - } - } - - encoded_auth = base64.b64encode(b"test@email.com/token:zendesk-api-key").decode("ascii") - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - "https://zendesk-test.com/api/v2/tickets", - additional_matcher=match_json, - status_code=201, - ) - data: Dict[str, Any] = { - "email_address": "test@email.com", - "name": "name", - "friendly_support_type": "Branding request", - "support_type": "branding_request", - "service_name": "t6", - "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92", - "branding_url": "branding_url", - } - with notify_api.app_context(): - Zendesk(ContactRequest(**data)).send_ticket() - - -def test_send_ticket_other(notify_api: Flask): - def match_json(request): - expected = { - "ticket": {"subject": "Support Request", "description": "", "email": "test@email.com", "tags": ["notification_api"]} - } - - encoded_auth = base64.b64encode(b"test@email.com/token:zendesk-api-key").decode("ascii") - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - "https://zendesk-test.com/api/v2/tickets", - additional_matcher=match_json, - status_code=201, - ) - - with notify_api.app_context(): - Zendesk(ContactRequest(email_address="test@email.com")).send_ticket() - - -def test_send_ticket_user_profile(notify_api: Flask): - def match_json(request): - expected = { - "ticket": { - "subject": "Support Request", - "description": "

---

user_profile", - "email": "test@email.com", - "tags": ["notification_api"], - } - } - - encoded_auth = base64.b64encode(b"test@email.com/token:zendesk-api-key").decode("ascii") - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - "https://zendesk-test.com/api/v2/tickets", - additional_matcher=match_json, - status_code=201, - ) - - with notify_api.app_context(): - Zendesk( - ContactRequest( - email_address="test@email.com", - user_profile="user_profile", - ) - ).send_ticket() - - -def test_send_ticket_unknown_error(notify_api: Flask): - def match_json(request): - expected = { - "ticket": {"subject": "Support Request", "description": "", "email": "test@email.com", "tags": ["notification_api"]} - } - - encoded_auth = base64.b64encode(b"test@email.com/token:zendesk-api-key").decode("ascii") - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - "https://zendesk-test.com/api/v2/tickets", - additional_matcher=match_json, - status_code=403, - ) - - with notify_api.app_context(): - with pytest.raises(HTTPError): - Zendesk(ContactRequest(email_address="test@email.com")).send_ticket() diff --git a/tests/app/clients/test_zendesk_sell.py b/tests/app/clients/test_zendesk_sell.py deleted file mode 100644 index b2b2ae644b..0000000000 --- a/tests/app/clients/test_zendesk_sell.py +++ /dev/null @@ -1,612 +0,0 @@ -import json -from typing import Any, Dict, Optional, Union - -import pytest -import requests_mock -from flask import Flask -from pytest_mock import MockFixture - -from app.clients.zendesk_sell import ZenDeskSell -from app.models import Service -from app.user.contact_request import ContactRequest - - -def test_create_lead(notify_api: Flask): - def match_json(request): - expected = { - "data": { - "last_name": "User", - "first_name": "Test", - "organization_name": "", - "email": "test@email.com", - "description": "Program: \n: ", - "tags": ["", "en"], - "status": "New", - "source_id": 2085874, - "owner_id": ZenDeskSell.OWNER_ID, - "custom_fields": { - "Product": ["Notify"], - "Intended recipients": "No value", - }, - } - } - - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == "Bearer zendesksell-api-key" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - "POST", - url="https://zendesksell-test.com/v2/leads/upsert?email=test@email.com", - headers={"Accept": "application/json", "Content-Type": "application/json"}, - additional_matcher=match_json, - status_code=201, - ) - - with notify_api.app_context(): - response = ZenDeskSell().upsert_lead(ContactRequest(email_address="test@email.com", name="Test User")) - assert response == 201 - - -def test_create_lead_missing_name(notify_api: Flask): - - # Name field is a requirement for the zendesk sell API interface - with notify_api.app_context(): - with pytest.raises(AssertionError): - ZenDeskSell().upsert_lead(ContactRequest(email_address="test@email.com")) - - -def generate_contact_url(existing_contact_id: Optional[str], service: Service) -> str: - if existing_contact_id: - return f"https://zendesksell-test.com/v2/contacts/{existing_contact_id}" - else: - return 
f"https://zendesksell-test.com/v2/contacts/upsert?" f"custom_fields[notify_user_id]={str(service.users[0].id)}" - - -def contact_http_method(existing_contact_id: Optional[str]): - return "PUT" if existing_contact_id else "POST" - - -@pytest.mark.parametrize( - "existing_contact_id,created_at,updated_at,expected_created", - [ - (None, "2021-03-24T14:49:38Z", "2021-03-24T14:49:38Z", True), - (None, "2021-03-24T14:49:38Z", "2021-04-24T14:49:38Z", False), - ("1", "2021-03-24T14:49:38Z", "2021-04-24T14:49:38Z", False), - ], -) -def test_create_or_upsert_contact( - existing_contact_id: Optional[str], - created_at: str, - updated_at: str, - expected_created: bool, - notify_api: Flask, - sample_service: Service, -): - def match_json(request): - expected = { - "data": { - "last_name": "User", - "first_name": "Test", - "email": "notify@digital.cabinet-office.gov.uk", - "mobile": "+16502532222", - "owner_id": ZenDeskSell.OWNER_ID, - "custom_fields": {"notify_user_id": str(sample_service.users[0].id)}, - } - } - - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == "Bearer zendesksell-api-key" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - expected_contact_id = existing_contact_id or "123456789" - resp_data = { - "data": { - "id": expected_contact_id, - "created_at": created_at, - "updated_at": updated_at, - } - } - rmock.request( - contact_http_method(existing_contact_id), - url=generate_contact_url(existing_contact_id, sample_service), - headers={"Accept": "application/json", "Content-Type": "application/json"}, - additional_matcher=match_json, - status_code=200, - text=json.dumps(resp_data), - ) - with notify_api.app_context(): - contact_id, is_created = ZenDeskSell().upsert_contact(sample_service.users[0], existing_contact_id) - assert expected_contact_id == contact_id - assert is_created == expected_created - - -@pytest.mark.parametrize( - "existing_contact_id,expected_resp_data", - [ - (None, {"blank": "blank"}), - ( - None, - { - "data": { - "created_at": "2021-02-24T14:49:38Z", - "updated_at": "2021-03-24T14:49:38Z", - } - }, - ), - (None, {"data": {"id": "123456789", "created_at": "2021-02-24T14:49:38Z"}}), - (None, {"data": {"id": "123456789", "updated_at": "2021-02-24T14:49:38Z"}}), - (1, {"blank": "blank"}), - ( - 1, - { - "data": { - "created_at": "2021-02-24T14:49:38Z", - "updated_at": "2021-03-24T14:49:38Z", - } - }, - ), - (1, {"data": {"id": "123456789", "created_at": "2021-02-24T14:49:38Z"}}), - (1, {"data": {"id": "123456789", "updated_at": "2021-02-24T14:49:38Z"}}), - ], -) -def test_create_contact_invalid_response( - notify_api: Flask, - sample_service: Service, - existing_contact_id: Optional[str], - expected_resp_data: Dict[str, Dict[str, Union[int, str]]], -): - def match_json(request): - expected = { - "data": { - "last_name": "User", - "first_name": "Test", - "email": "notify@digital.cabinet-office.gov.uk", - "mobile": "+16502532222", - "owner_id": ZenDeskSell.OWNER_ID, - "custom_fields": {"notify_user_id": str(sample_service.users[0].id)}, - } - } - - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == "Bearer zendesksell-api-key" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - rmock.request( - contact_http_method(existing_contact_id), - url=generate_contact_url(existing_contact_id, sample_service), - headers={"Accept": "application/json", "Content-Type": "application/json"}, - 
additional_matcher=match_json, - status_code=200, - text=json.dumps(expected_resp_data), - ) - with notify_api.app_context(): - contact_id, _ = ZenDeskSell().upsert_contact(sample_service.users[0], existing_contact_id) - assert not contact_id - - -def test_convert_lead_to_contact(notify_api: Flask, sample_service: Service): - lead_id = "123456789" - - def match_json(request): - expected = { - "data": { - "lead_id": lead_id, - "owner_id": ZenDeskSell.OWNER_ID, - "create_deal": False, - } - } - - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == "Bearer zendesksell-api-key" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - expected_contact_id = "1234567890" - rmock.request( - "GET", - url=f"https://zendesksell-test.com/v2/leads?email={sample_service.users[0].email_address}", - headers={"Accept": "application/json", "Content-Type": "application/json"}, - status_code=200, - text=json.dumps({"items": [{"data": {"id": lead_id}}]}), - ) - rmock.request( - "POST", - url="https://zendesksell-test.com/v2/lead_conversions", - headers={"Accept": "application/json", "Content-Type": "application/json"}, - additional_matcher=match_json, - status_code=200, - text=json.dumps({"data": {"individual_id": expected_contact_id}}), - ) - - with notify_api.app_context(): - contact_id = ZenDeskSell().convert_lead_to_contact(sample_service.users[0]) - assert contact_id == expected_contact_id - - -def test_convert_lead_to_contact_search_fails(notify_api: Flask, sample_service: Service, mocker: MockFixture): - - with notify_api.app_context(): - search_lead_id_mock = mocker.patch("app.user.rest.ZenDeskSell.search_lead_id", return_value=None) - contact_id = ZenDeskSell().convert_lead_to_contact(sample_service.users[0]) - search_lead_id_mock.assert_called_once_with(sample_service.users[0]) - assert not contact_id - - -def test_delete_contact(notify_api: Flask): - def match_header(request): - return request.headers.get("Authorization") == "Bearer zendesksell-api-key" - - with requests_mock.mock() as rmock: - contact_id = "123456789" - rmock.request( - "DELETE", - url=f"https://zendesksell-test.com/v2/contacts/{contact_id}", - headers={"Accept": "application/json", "Content-Type": "application/json"}, - additional_matcher=match_header, - status_code=200, - ) - - with notify_api.app_context(): - # as long as it doesn't throw we are OK as this is a best effort method - ZenDeskSell().delete_contact(contact_id) - - -def test_create_deal(notify_api: Flask, sample_service: Service): - def match_json(request): - expected = { - "data": { - "contact_id": "123456789", - "name": "Sample service", - "stage_id": 123456789, - "owner_id": ZenDeskSell.OWNER_ID, - "custom_fields": {"notify_service_id": str(sample_service.id)}, - } - } - - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == "Bearer zendesksell-api-key" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - contact_id = "123456789" - expected_deal_id = "987654321" - resp_data = {"data": {"id": expected_deal_id, "contact_id": contact_id}} - rmock.request( - "POST", - url=f"https://zendesksell-test.com/v2/deals/upsert?" 
f"custom_fields[notify_service_id]={str(sample_service.id)}", - headers={"Accept": "application/json", "Content-Type": "application/json"}, - additional_matcher=match_json, - status_code=200, - text=json.dumps(resp_data), - ) - - with notify_api.app_context(): - deal_id = ZenDeskSell().upsert_deal(contact_id, sample_service, 123456789) - assert expected_deal_id == deal_id - - -@pytest.mark.parametrize( - "expected_resp_data", - [ - {"blank": "blank"}, - {"data": {"blank": "blank"}}, - ], -) -def test_create_deal_invalid_response( - notify_api: Flask, - sample_service: Service, - expected_resp_data: Dict[str, Dict[str, Union[int, str]]], -): - def match_json(request): - expected = { - "data": { - "contact_id": "123456789", - "name": "Sample service", - "stage_id": 123456789, - "owner_id": ZenDeskSell.OWNER_ID, - "custom_fields": {"notify_service_id": str(sample_service.id)}, - } - } - - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == "Bearer zendesksell-api-key" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - contact_id = "123456789" - rmock.request( - "POST", - url=f"https://zendesksell-test.com/v2/deals/upsert?" f"custom_fields[notify_service_id]={str(sample_service.id)}", - headers={"Accept": "application/json", "Content-Type": "application/json"}, - additional_matcher=match_json, - status_code=200, - text=json.dumps(expected_resp_data), - ) - - with notify_api.app_context(): - deal_id = ZenDeskSell().upsert_deal(contact_id, sample_service, 123456789) - assert not deal_id - - -def test_create_note(notify_api: Flask): - resource_id = "1" - - def match_json(request): - expected = { - "data": { - "resource_type": "deal", - "resource_id": resource_id, - "content": "\n".join( - [ - "Live Notes", - "service_name just requested to go live.", - "", - "- Department/org: department_org_name", - "- Intended recipients: intended_recipients", - "- Purpose: main_use_case", - "- Notification types: notification_types", - "- Expected monthly volume: expected_volume", - "---", - "service_url", - ] - ), - } - } - - json_matches = request.json() == expected - basic_auth_header = request.headers.get("Authorization") == "Bearer zendesksell-api-key" - - return json_matches and basic_auth_header - - with requests_mock.mock() as rmock: - expected_note_id = "1" - resp_data = {"data": {"id": expected_note_id}} - rmock.request( - "POST", - url="https://zendesksell-test.com/v2/notes", - headers={"Accept": "application/json", "Content-Type": "application/json"}, - additional_matcher=match_json, - status_code=200, - text=json.dumps(resp_data), - ) - - data: Dict[str, Any] = { - "email_address": "test@email.com", - "service_name": "service_name", - "department_org_name": "department_org_name", - "intended_recipients": "intended_recipients", - "main_use_case": "main_use_case", - "notification_types": "notification_types", - "expected_volume": "expected_volume", - "service_url": "service_url", - "support_type": "go_live_request", - } - - with notify_api.app_context(): - note_id = ZenDeskSell().create_note(ZenDeskSell.NoteResourceType.DEAL, resource_id, ContactRequest(**data)) - assert expected_note_id == note_id - - -@pytest.mark.parametrize( - "expected_resp_data", - [ - {"blank": "blank"}, - {"data": {"blank": "blank"}}, - ], -) -def test_create_note_invalid_response( - notify_api: Flask, - sample_service: Service, - expected_resp_data: Dict[str, Dict[str, Union[int, str]]], -): - - with requests_mock.mock() as rmock: - 
rmock.request( - "POST", - url="https://zendesksell-test.com/v2/notes", - headers={"Accept": "application/json", "Content-Type": "application/json"}, - status_code=200, - text=json.dumps(expected_resp_data), - ) - - data: Dict[str, Any] = { - "email_address": "test@email.com", - "service_name": "service_name", - "department_org_name": "department_org_name", - "intended_recipients": "intended_recipients", - "main_use_case": "main_use_case", - "notification_types": "notification_types", - "expected_volume": "expected_volume", - "service_url": "service_url", - "support_type": "go_live_request", - } - - with notify_api.app_context(): - note_id = ZenDeskSell().create_note(ZenDeskSell.NoteResourceType.DEAL, "1", ContactRequest(**data)) - assert not note_id - - -@pytest.mark.parametrize("is_go_live,existing_contact_id", [(False, None), (False, "1"), (True, None)]) -def test_create_service_or_go_live_contact_fail( - notify_api: Flask, - sample_service: Service, - mocker: MockFixture, - is_go_live: bool, - existing_contact_id: Optional[str], -): - - upsert_contact_mock = mocker.patch("app.user.rest.ZenDeskSell.upsert_contact", return_value=(None, False)) - convert_lead_to_contact_mock = mocker.patch( - "app.user.rest.ZenDeskSell.convert_lead_to_contact", - return_value=existing_contact_id, - ) - - with notify_api.app_context(): - if is_go_live: - assert not ZenDeskSell().send_go_live_service(sample_service, sample_service.users[0]) - upsert_contact_mock.assert_called_once_with(sample_service.users[0], existing_contact_id) - else: - assert not ZenDeskSell().send_create_service(sample_service, sample_service.users[0]) - convert_lead_to_contact_mock.assert_called_once_with(sample_service.users[0]) - upsert_contact_mock.assert_called_once_with(sample_service.users[0], existing_contact_id) - - -@pytest.mark.parametrize("is_go_live,existing_contact_id", [(False, None), (False, "2"), (True, None)]) -def test_create_service_or_go_live_deal_fail( - notify_api: Flask, - sample_service: Service, - mocker: MockFixture, - is_go_live: bool, - existing_contact_id: Optional[str], -): - - with requests_mock.mock() as rmock: - contact_id = existing_contact_id or "1" - rmock.request( - contact_http_method(existing_contact_id), - url=generate_contact_url(existing_contact_id, sample_service), - headers={"Accept": "application/json", "Content-Type": "application/json"}, - status_code=200, - text=json.dumps({"data": {"id": contact_id, "created_at": "1", "updated_at": "1"}}), - ) - - mocker.patch("app.user.rest.ZenDeskSell.upsert_deal", return_value=None) - mocker.patch( - "app.user.rest.ZenDeskSell.convert_lead_to_contact", - return_value=existing_contact_id, - ) - contact_delete_mock = mocker.patch("app.user.rest.ZenDeskSell.delete_contact") - with notify_api.app_context(): - if is_go_live: - assert not ZenDeskSell().send_go_live_service(sample_service, sample_service.users[0]) - else: - assert not ZenDeskSell().send_create_service(sample_service, sample_service.users[0]) - - contact_delete_mock.assert_called_once_with(contact_id) - - -@pytest.mark.parametrize("is_go_live,existing_contact_id", [(False, None), (False, "1"), (True, None)]) -def test_create_service_or_go_live_deal_fail_contact_exists( - notify_api: Flask, - sample_service: Service, - mocker: MockFixture, - is_go_live: bool, - existing_contact_id: Optional[str], -): - with requests_mock.mock() as rmock: - contact_id = existing_contact_id or "1" - rmock.request( - contact_http_method(existing_contact_id), - url=generate_contact_url(existing_contact_id, 
sample_service), - headers={"Accept": "application/json", "Content-Type": "application/json"}, - status_code=200, - text=json.dumps({"data": {"id": contact_id, "created_at": "1", "updated_at": "2"}}), - ) - - mocker.patch("app.user.rest.ZenDeskSell.upsert_deal", return_value=None) - mocker.patch( - "app.user.rest.ZenDeskSell.convert_lead_to_contact", - return_value=existing_contact_id, - ) - contact_delete_mock = mocker.patch("app.user.rest.ZenDeskSell.delete_contact") - with notify_api.app_context(): - if is_go_live: - assert not ZenDeskSell().send_go_live_service(sample_service, sample_service.users[0]) - else: - assert not ZenDeskSell().send_create_service(sample_service, sample_service.users[0]) - - contact_delete_mock.assert_not_called() - - -@pytest.mark.parametrize("existing_contact_id", [None, "2"]) -def test_send_create_service( - notify_api: Flask, - sample_service: Service, - mocker: MockFixture, - existing_contact_id: Optional[str], -): - - contact_id = existing_contact_id or "1" - upsert_contact_mock = mocker.patch("app.user.rest.ZenDeskSell.upsert_contact", return_value=(contact_id, True)) - convert_lead_to_contact_mock = mocker.patch( - "app.user.rest.ZenDeskSell.convert_lead_to_contact", - return_value=existing_contact_id, - ) - upsert_deal_mock = mocker.patch("app.user.rest.ZenDeskSell.upsert_deal", return_value=1) - with notify_api.app_context(): - assert ZenDeskSell().send_create_service(sample_service, sample_service.users[0]) - convert_lead_to_contact_mock.assert_called_once_with(sample_service.users[0]) - upsert_contact_mock.assert_called_once_with(sample_service.users[0], existing_contact_id) - upsert_deal_mock.assert_called_once_with(contact_id, sample_service, ZenDeskSell.STATUS_CREATE_TRIAL) - - -def test_send_go_live_request(notify_api: Flask, sample_service: Service, mocker: MockFixture): - deal_id = "1" - search_deal_id_mock = mocker.patch("app.user.rest.ZenDeskSell.search_deal_id", return_value=deal_id) - send_create_service_mock = mocker.patch("app.user.rest.ZenDeskSell.send_create_service", return_value="1") - create_note_mock = mocker.patch("app.user.rest.ZenDeskSell.create_note", return_value="2") - data: Dict[str, Any] = { - "email_address": "test@email.com", - "service_name": "service_name", - "department_org_name": "department_org_name", - "intended_recipients": "intended_recipients", - "main_use_case": "main_use_case", - "notification_types": "notification_types", - "expected_volume": "expected_volume", - "service_url": "service_url", - "support_type": "go_live_request", - } - - contact = ContactRequest(**data) - - with notify_api.app_context(): - assert ZenDeskSell().send_go_live_request(sample_service, sample_service.users[0], contact) - search_deal_id_mock.assert_called_once_with(sample_service) - send_create_service_mock.assert_not_called() - create_note_mock.assert_called_once_with(ZenDeskSell.NoteResourceType.DEAL, deal_id, contact) - - -def test_send_go_live_request_search_failed(notify_api: Flask, sample_service: Service, mocker: MockFixture): - deal_id = "1" - search_deal_id_mock = mocker.patch("app.user.rest.ZenDeskSell.search_deal_id", return_value=None) - send_create_service_mock = mocker.patch("app.user.rest.ZenDeskSell.send_create_service", return_value=deal_id) - create_note_mock = mocker.patch("app.user.rest.ZenDeskSell.create_note", return_value="1") - data: Dict[str, Any] = { - "email_address": "test@email.com", - "service_name": "service_name", - "department_org_name": "department_org_name", - "intended_recipients": 
"intended_recipients", - "main_use_case": "main_use_case", - "notification_types": "notification_types", - "expected_volume": "expected_volume", - "service_url": "service_url", - "support_type": "go_live_request", - } - contact = ContactRequest(**data) - - with notify_api.app_context(): - assert ZenDeskSell().send_go_live_request(sample_service, sample_service.users[0], contact) - search_deal_id_mock.assert_called_once_with(sample_service) - send_create_service_mock.assert_called_once_with(sample_service, sample_service.users[0]) - create_note_mock.assert_called_once_with(ZenDeskSell.NoteResourceType.DEAL, deal_id, contact) - - -def test_send_go_live_service(notify_api: Flask, sample_service: Service, mocker: MockFixture): - - contact_id = 1 - upsert_contact_mock = mocker.patch("app.user.rest.ZenDeskSell.upsert_contact", return_value=(contact_id, True)) - upsert_deal_mock = mocker.patch("app.user.rest.ZenDeskSell.upsert_deal", return_value=1) - with notify_api.app_context(): - assert ZenDeskSell().send_go_live_service(sample_service, sample_service.users[0]) - upsert_contact_mock.assert_called_once_with(sample_service.users[0], None) - upsert_deal_mock.assert_called_once_with(contact_id, sample_service, ZenDeskSell.STATUS_CLOSE_LIVE) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 78d85dd740..a0b428eed0 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -1,4 +1,6 @@ import json +import random +import string import uuid from datetime import datetime, timedelta @@ -19,6 +21,7 @@ from app.dao.organisation_dao import dao_create_organisation from app.dao.provider_rates_dao import create_provider_rates from app.dao.services_dao import dao_add_user_to_service, dao_create_service +from app.dao.template_categories_dao import dao_create_template_category from app.dao.templates_dao import dao_create_template from app.dao.users_dao import create_secret_code, create_user_code from app.history_meta import create_history @@ -50,6 +53,7 @@ ServiceEmailReplyTo, ServiceSafelist, Template, + TemplateCategory, TemplateHistory, ) from tests import create_authorization_header @@ -142,14 +146,14 @@ def sample_sms_code(notify_db, notify_db_session, code=None, code_type="sms", us return code -@pytest.fixture(scope="function") -def sample_service( +def create_sample_service( notify_db, notify_db_session, service_name="Sample service", user=None, restricted=False, limit=1000, + sms_limit=1000, email_from=None, permissions=None, research_mode=None, @@ -162,6 +166,7 @@ def sample_service( data = { "name": service_name, "message_limit": limit, + "sms_daily_limit": sms_limit, "restricted": restricted, "email_from": email_from, "created_by": user, @@ -185,6 +190,31 @@ def sample_service( return service +@pytest.fixture(scope="function") +def sample_service( + notify_db, + notify_db_session, + service_name="Sample service", + user=None, + restricted=False, + limit=1000, + email_from=None, + permissions=None, + research_mode=None, +): + return create_sample_service( + notify_db, + notify_db_session, + service_name="Sample service", + user=None, + restricted=False, + limit=1000, + email_from=None, + permissions=None, + research_mode=None, + ) + + @pytest.fixture(scope="function", name="sample_service_full_permissions") def _sample_service_full_permissions(notify_db_session): service = create_service( @@ -203,7 +233,98 @@ def _sample_service_custom_letter_contact_block(sample_service): @pytest.fixture(scope="function") -def sample_template( +def 
sample_template_category_with_templates(notify_db, notify_db_session, sample_template_category): + create_sample_template(notify_db, notify_db_session, template_category=sample_template_category) + create_sample_template(notify_db, notify_db_session, template_category=sample_template_category) + return sample_template_category + + +@pytest.fixture(scope="function") +def populate_generic_categories(notify_db_session): + generic_categories = [ + { + "id": current_app.config["DEFAULT_TEMPLATE_CATEGORY_LOW"], + "name_en": "Low Category (Bulk)", + "name_fr": "Catégorie Basse (En Vrac)", + "sms_process_type": "low", + "email_process_type": "low", + "hidden": True, + }, + { + "id": current_app.config["DEFAULT_TEMPLATE_CATEGORY_MEDIUM"], + "name_en": "Medium Category (Normal)", + "name_fr": "Catégorie Moyenne (Normale)", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": True, + }, + { + "id": current_app.config["DEFAULT_TEMPLATE_CATEGORY_HIGH"], + "name_en": "High Category (Priority)", + "name_fr": "Catégorie Haute (Priorité)", + "sms_process_type": "high", + "email_process_type": "high", + "hidden": True, + }, + ] + for category in generic_categories: + dao_create_template_category(TemplateCategory(**category)) + + yield + + +@pytest.fixture(scope="function") +def sample_template_category( + notify_db, + notify_db_session, + name_en="Category Name", + name_fr="Category Name (FR)", + description_en="Category Description", + description_fr="Category Description (FR)", + sms_process_type="normal", + email_process_type="normal", + hidden=False, +): + return create_template_category( + notify_db, + notify_db_session, + name_en="Category Name", + name_fr="Category Name (FR)", + description_en="Category Description", + description_fr="Category Description (FR)", + sms_process_type="normal", + email_process_type="normal", + hidden=False, + ) + + +def create_template_category( + notify_db, + notify_db_session, + name_en="Category Name", + name_fr="Category Name (FR)", + description_en="Category Description", + description_fr="Category Description (FR)", + sms_process_type="normal", + email_process_type="normal", + hidden=False, +): + data = { + "name_en": name_en, + "name_fr": name_fr, + "description_en": description_en, + "description_fr": description_fr, + "sms_process_type": sms_process_type, + "email_process_type": email_process_type, + "hidden": hidden, + } + template_category = TemplateCategory(**data) + dao_create_template_category(template_category) + + return template_category + + +def create_sample_template( notify_db, notify_db_session, template_name="Template Name", @@ -214,6 +335,7 @@ def sample_template( subject_line="Subject", user=None, service=None, + template_category=None, created_by=None, process_type="normal", permissions=[EMAIL_TYPE, SMS_TYPE], @@ -241,6 +363,11 @@ def sample_template( data.update({"subject": subject_line}) if template_type == "letter": data["postage"] = "second" + if template_category: + data["template_category"] = template_category + else: + cat = create_template_category(notify_db, notify_db_session, name_en=str(uuid.uuid4), name_fr=str(uuid.uuid4)) + data.update({"template_category_id": cat.id}) template = Template(**data) dao_create_template(template) @@ -248,13 +375,80 @@ def sample_template( @pytest.fixture(scope="function") -def sample_template_without_sms_permission(notify_db, notify_db_session): - return sample_template(notify_db, notify_db_session, permissions=[EMAIL_TYPE]) +def sample_template( + notify_db, + notify_db_session, 
+ template_name="Template Name", + template_type="sms", + content="This is a template:\nwith a newline", + archived=False, + hidden=False, + subject_line="Subject", + user=None, + service=None, + created_by=None, + process_type="normal", + permissions=[EMAIL_TYPE, SMS_TYPE], +): + return create_sample_template( + notify_db, + notify_db_session, + template_name="Template Name", + template_type="sms", + content="This is a template:\nwith a newline", + archived=False, + hidden=False, + subject_line="Subject", + user=None, + service=None, + created_by=None, + process_type="normal", + template_category=None, + permissions=[EMAIL_TYPE, SMS_TYPE], + ) @pytest.fixture(scope="function") -def sample_template_without_letter_permission(notify_db, notify_db_session): - return sample_template(notify_db, notify_db_session, template_type="letter", permissions=[EMAIL_TYPE]) +def sample_template_with_priority_override( + notify_db, + notify_db_session, + sample_template_category, + template_name="Template Name", + template_type="sms", + content="This is a template:\nwith a newline", + archived=False, + hidden=False, + subject_line="Subject", + user=None, + service=None, + created_by=None, + process_type="priority", + permissions=[EMAIL_TYPE, SMS_TYPE], +): + return create_sample_template( + notify_db, + notify_db_session, + template_name="Template Name", + template_type="sms", + content="This is a template:\nwith a newline", + archived=False, + hidden=False, + subject_line="Subject", + user=None, + service=None, + created_by=None, + process_type="priority", + template_category=sample_template_category, + permissions=[EMAIL_TYPE, SMS_TYPE], + ) + + +def create_sample_template_without_sms_permission(notify_db, notify_db_session): + return create_sample_template(notify_db, notify_db_session, permissions=[EMAIL_TYPE]) + + +def create_sample_template_without_letter_permission(notify_db, notify_db_session): + return create_sample_template(notify_db, notify_db_session, template_type="letter", permissions=[EMAIL_TYPE]) @pytest.fixture(scope="function") @@ -269,8 +463,7 @@ def sample_sms_template_with_html(sample_service): return create_template(sample_service, content="Hello (( Name))\nHere is some HTML & entities") -@pytest.fixture(scope="function") -def sample_email_template( +def create_sample_email_template( notify_db, notify_db_session, template_name="Email Template Name", @@ -303,8 +496,32 @@ def sample_email_template( @pytest.fixture(scope="function") -def sample_template_without_email_permission(notify_db, notify_db_session): - return sample_email_template(notify_db, notify_db_session, permissions=[SMS_TYPE]) +def sample_email_template( + notify_db, + notify_db_session, + template_name="Email Template Name", + template_type="email", + user=None, + content="This is a template", + subject_line="Email Subject", + service=None, + permissions=[EMAIL_TYPE, SMS_TYPE], +): + return create_sample_email_template( + notify_db, + notify_db_session, + template_name, + template_type, + user, + content, + subject_line, + service=None, + permissions=[EMAIL_TYPE, SMS_TYPE], + ) + + +def create_sample_template_without_email_permission(notify_db, notify_db_session): + return create_sample_email_template(notify_db, notify_db_session, permissions=[SMS_TYPE]) @pytest.fixture @@ -320,7 +537,7 @@ def sample_trial_letter_template(sample_service_full_permissions): @pytest.fixture(scope="function") def sample_email_template_with_placeholders(notify_db, notify_db_session): - return sample_email_template( + return 
create_sample_email_template( notify_db, notify_db_session, content="Hello ((name))\nThis is an email from GOV.UK", @@ -330,7 +547,7 @@ def sample_email_template_with_placeholders(notify_db, notify_db_session): @pytest.fixture(scope="function") def sample_email_template_with_html(notify_db, notify_db_session): - return sample_email_template( + return create_sample_email_template( notify_db, notify_db_session, content="Hello ((name))\nThis is an email from GOV.UK with some HTML", @@ -340,7 +557,7 @@ def sample_email_template_with_html(notify_db, notify_db_session): @pytest.fixture(scope="function") def sample_email_template_with_advanced_html(notify_db, notify_db_session): - return sample_email_template( + return create_sample_email_template( notify_db, notify_db_session, content="
((name)) some HTML that should be right aligned
", @@ -348,8 +565,7 @@ def sample_email_template_with_advanced_html(notify_db, notify_db_session): ) -@pytest.fixture(scope="function") -def sample_api_key(notify_db, notify_db_session, service=None, key_type=KEY_TYPE_NORMAL, name=None): +def create_sample_api_key(notify_db, notify_db_session, service=None, key_type=KEY_TYPE_NORMAL, name=None): if service is None: service = create_service(check_if_service_exists=True) data = { @@ -363,18 +579,22 @@ def sample_api_key(notify_db, notify_db_session, service=None, key_type=KEY_TYPE return api_key +@pytest.fixture(scope="function") +def sample_api_key(notify_db, notify_db_session, service=None, key_type=KEY_TYPE_NORMAL): + return create_sample_api_key(notify_db, notify_db_session, service, key_type) + + @pytest.fixture(scope="function") def sample_test_api_key(notify_db, notify_db_session, service=None): - return sample_api_key(notify_db, notify_db_session, service, KEY_TYPE_TEST) + return create_sample_api_key(notify_db, notify_db_session, service, KEY_TYPE_TEST) @pytest.fixture(scope="function") def sample_team_api_key(notify_db, notify_db_session, service=None): - return sample_api_key(notify_db, notify_db_session, service, KEY_TYPE_TEAM) + return create_sample_api_key(notify_db, notify_db_session, service, KEY_TYPE_TEAM) -@pytest.fixture(scope="function") -def sample_job( +def create_sample_job( notify_db, notify_db_session, service=None, @@ -411,6 +631,35 @@ def sample_job( return job +@pytest.fixture(scope="function") +def sample_job( + notify_db, + notify_db_session, + service=None, + template=None, + notification_count=1, + created_at=None, + job_status="pending", + scheduled_for=None, + processing_started=None, + original_file_name="some.csv", + archived=False, +): + return create_sample_job( + notify_db, + notify_db_session, + service=None, + template=None, + notification_count=1, + created_at=None, + job_status="pending", + scheduled_for=None, + processing_started=None, + original_file_name="some.csv", + archived=False, + ) + + @pytest.fixture(scope="function") def sample_job_with_placeholdered_template( sample_job, @@ -435,7 +684,7 @@ def sample_email_job(notify_db, notify_db_session, service=None, template=None): if service is None: service = create_service(check_if_service_exists=True) if template is None: - template = sample_email_template(notify_db, notify_db_session, service=service) + template = create_sample_email_template(notify_db, notify_db_session, service=service) job_id = uuid.uuid4() data = { "id": job_id, @@ -471,8 +720,7 @@ def sample_letter_job(sample_letter_template): return job -@pytest.fixture(scope="function") -def sample_notification_with_job( +def create_sample_notification_with_job( notify_db, notify_db_session, service=None, @@ -514,7 +762,43 @@ def sample_notification_with_job( @pytest.fixture(scope="function") -def sample_notification( +def sample_notification_with_job( + notify_db, + notify_db_session, + service=None, + template=None, + job=None, + job_row_number=None, + to_field=None, + status="created", + reference=None, + created_at=None, + sent_at=None, + billable_units=1, + personalisation=None, + api_key=None, + key_type=KEY_TYPE_NORMAL, +): + return create_sample_notification_with_job( + notify_db, + notify_db_session, + service, + template, + job, + job_row_number, + to_field, + status, + reference, + created_at, + sent_at, + billable_units, + personalisation, + api_key, + key_type, + ) + + +def create_sample_notification( notify_db, notify_db_session, service=None, @@ -538,6 +822,7 @@ def 
sample_notification( scheduled_for=None, normalised_to=None, postage=None, + queue_name=None, ): if created_at is None: created_at = datetime.utcnow() @@ -585,6 +870,7 @@ def sample_notification( "rate_multiplier": rate_multiplier, "normalised_to": normalised_to, "postage": postage, + "queue_name": queue_name, } if job_row_number is not None: data["job_row_number"] = job_row_number @@ -604,6 +890,61 @@ def sample_notification( return notification +@pytest.fixture(scope="function") +def sample_notification( + notify_db, + notify_db_session, + service=None, + template=None, + job=None, + job_row_number=None, + to_field=None, + status="created", + provider_response=None, + reference=None, + created_at=None, + sent_at=None, + billable_units=1, + personalisation=None, + api_key=None, + key_type=KEY_TYPE_NORMAL, + sent_by=None, + international=False, + client_reference=None, + rate_multiplier=1.0, + scheduled_for=None, + normalised_to=None, + postage=None, + queue_name=None, +): + return create_sample_notification( + notify_db, + notify_db_session, + service=None, + template=None, + job=None, + job_row_number=None, + to_field=None, + status="created", + provider_response=None, + reference=None, + created_at=None, + sent_at=None, + billable_units=1, + personalisation=None, + api_key=None, + key_type=KEY_TYPE_NORMAL, + sent_by=None, + international=False, + client_reference=None, + rate_multiplier=1.0, + scheduled_for=None, + normalised_to=None, + postage=None, + queue_name=None, + ) + + @pytest.fixture def sample_letter_notification(sample_letter_template): address = { @@ -622,8 +963,8 @@ def sample_letter_notification(sample_letter_template): def sample_email_notification(notify_db, notify_db_session): created_at = datetime.utcnow() service = create_service(check_if_service_exists=True) - template = sample_email_template(notify_db, notify_db_session, service=service) - job = sample_job(notify_db, notify_db_session, service=service, template=template) + template = create_sample_email_template(notify_db, notify_db_session, service=service) + job = create_sample_job(notify_db, notify_db_session, service=service, template=template) notification_id = uuid.uuid4() @@ -714,12 +1055,10 @@ def mock_celery_send_email(mocker): @pytest.fixture(scope="function") def mock_encryption(mocker): - return mocker.patch("app.encryption.encrypt", return_value="something_encrypted") + return mocker.patch("app.encryption.CryptoSigner.sign", return_value="something_encrypted") -@pytest.fixture(scope="function") -def sample_invited_user(notify_db, notify_db_session, service=None, to_email_address=None): - +def create_sample_invited_user(notify_db, notify_db_session, service=None, to_email_address=None): if service is None: service = create_service(check_if_service_exists=True) if to_email_address is None: @@ -739,13 +1078,17 @@ def sample_invited_user(notify_db, notify_db_session, service=None, to_email_add return invited_user +@pytest.fixture(scope="function") +def sample_invited_user(notify_db, notify_db_session, service=None, to_email_address=None): + return create_sample_invited_user(notify_db, notify_db_session, service, to_email_address) + + @pytest.fixture(scope="function") def sample_invited_org_user(notify_db, notify_db_session, sample_user, sample_organisation): return create_invited_org_user(sample_organisation, sample_user) -@pytest.fixture(scope="function") -def sample_user_service_permission(notify_db, notify_db_session, service=None, user=None, permission="manage_settings"): +def 
create_sample_user_service_permission(notify_db, notify_db_session, service=None, user=None, permission="manage_settings"): if user is None: user = create_user() if service is None: @@ -763,6 +1106,11 @@ def sample_user_service_permission(notify_db, notify_db_session, service=None, u return p_model +@pytest.fixture(scope="function") +def sample_user_service_permission(notify_db, notify_db_session, service=None, user=None, permission="manage_settings"): + return create_sample_user_service_permission(notify_db, notify_db_session, service, user, permission) + + @pytest.fixture(scope="function") def fake_uuid(): return "6ce466d0-fd6a-11e5-82f5-e0accb9d11a6" @@ -857,6 +1205,20 @@ def password_reset_email_template(notify_db, notify_db_session): ) +@pytest.fixture(scope="function") +def forced_password_reset_email_template(notify_db, notify_db_session): + service, user = notify_service(notify_db, notify_db_session) + + return create_custom_template( + service=service, + user=user, + template_config_name="FORCED_PASSWORD_RESET_TEMPLATE_ID", + content="((user_name)) you can reset password by clicking ((url))", + subject="Forced reset your password", + template_type="email", + ) + + @pytest.fixture(scope="function") def verify_reply_to_address_email_template(notify_db, notify_db_session): service, user = notify_service(notify_db, notify_db_session) @@ -999,6 +1361,19 @@ def mou_signed_templates(notify_db, notify_db_session): } +@pytest.fixture(scope="function") +def contact_form_email_template(notify_db, notify_db_session): + service, user = notify_service(notify_db, notify_db_session) + return create_custom_template( + service=service, + user=user, + template_config_name="CONTACT_FORM_DIRECT_EMAIL_TEMPLATE_ID", + content=("contact form subbmission ((contact_us_content))"), + subject="Contact form", + template_type="email", + ) + + def create_custom_template(service, user, template_config_name, template_type, content="", subject=None): template = Template.query.get(current_app.config[template_config_name]) if not template: @@ -1026,6 +1401,7 @@ def notify_service(notify_db, notify_db_session): service = Service( name="Notify Service", message_limit=1000, + sms_daily_limit=1000, restricted=False, email_from="notify.service", created_by=user, @@ -1050,8 +1426,7 @@ def notify_service(notify_db, notify_db_session): return service, user -@pytest.fixture(scope="function") -def sample_service_safelist(notify_db, notify_db_session, service=None, email_address=None, mobile_number=None): +def create_sample_service_safelist(notify_db, notify_db_session, service=None, email_address=None, mobile_number=None): if service is None: service = create_service(check_if_service_exists=True) @@ -1067,6 +1442,11 @@ def sample_service_safelist(notify_db, notify_db_session, service=None, email_ad return safelisted_user +@pytest.fixture(scope="function") +def sample_service_safelist(notify_db, notify_db_session, service=None, email_address=None, mobile_number=None): + return create_sample_service_safelist(notify_db, notify_db_session, service, email_address, mobile_number) + + @pytest.fixture(scope="function") def sample_provider_rate(notify_db, notify_db_session, valid_from=None, rate=None, provider_identifier=None): create_provider_rates( @@ -1198,6 +1578,12 @@ def app_statsd(mocker): return current_app +@pytest.fixture(scope="function") +def app_bounce_rate_client(mocker): + current_app.bounce_rate_client = mocker.Mock() + return current_app + + def datetime_in_past(days=0, seconds=0): return datetime.now(tz=pytz.utc) - 
timedelta(days=days, seconds=seconds) @@ -1209,7 +1595,6 @@ def document_download_response(override={}): "id": "document-id", "direct_file_url": "http://direct-file-url.localdomain", "url": "http://frontend-url.localdomain", - "mlwr_sid": "mlwr-sid", "filename": "filename", "sending_method": "sending_method", "mime_type": "mime_type", @@ -1218,3 +1603,7 @@ def document_download_response(override={}): } return {"status": "ok", "document": base | override} + + +def random_sized_content(chars=string.ascii_uppercase + string.digits, size=10): + return "".join(random.choice(chars) for _ in range(size)) diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index 54a1781831..145915037e 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ b/tests/app/dao/notification_dao/test_notification_dao.py @@ -4,6 +4,7 @@ import pytest from freezegun import freeze_time +from itsdangerous import BadSignature from sqlalchemy.exc import IntegrityError, SQLAlchemyError from sqlalchemy.orm.exc import NoResultFound @@ -23,13 +24,16 @@ dao_update_notification, dao_update_notifications_by_reference, delete_notifications_older_than_retention_by_type, + get_latest_sent_notification_for_job, get_notification_by_id, + get_notification_count_for_job, get_notification_for_job, get_notification_with_personalisation, get_notifications_for_job, get_notifications_for_service, is_delivery_slow_for_provider, notifications_not_yet_sent, + resign_notifications, send_method_stats_by_service, set_scheduled_notification_to_processed, update_notification_status_by_id, @@ -38,6 +42,7 @@ from app.dao.organisation_dao import dao_add_service_to_organisation from app.models import ( JOB_STATUS_IN_PROGRESS, + JOB_STATUS_PENDING, KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, @@ -63,6 +68,7 @@ save_notification, save_scheduled_notification, ) +from tests.conftest import set_signer_secret_key def test_should_have_decorated_notifications_dao_functions(): @@ -72,6 +78,7 @@ def test_should_have_decorated_notifications_dao_functions(): assert dao_update_notification.__wrapped__.__name__ == "dao_update_notification" # noqa assert update_notification_status_by_reference.__wrapped__.__name__ == "update_notification_status_by_reference" # noqa assert get_notification_for_job.__wrapped__.__name__ == "get_notification_for_job" # noqa + assert get_notification_count_for_job.__wrapped__.__name__ == "get_notification_count_for_job" # noqa assert get_notifications_for_job.__wrapped__.__name__ == "get_notifications_for_job" # noqa assert get_notification_with_personalisation.__wrapped__.__name__ == "get_notification_with_personalisation" # noqa assert get_notifications_for_service.__wrapped__.__name__ == "get_notifications_for_service" # noqa @@ -439,10 +446,10 @@ def test_save_notification_and_increment_job(sample_template, sample_job): def test_save_notification_and_increment_correct_job(notify_db, notify_db_session, sample_template): - from tests.app.conftest import sample_job + from tests.app.conftest import create_sample_job - job_1 = sample_job(notify_db, notify_db_session, sample_template.service) - job_2 = sample_job(notify_db, notify_db_session, sample_template.service) + job_1 = create_sample_job(notify_db, notify_db_session, sample_template.service) + job_2 = create_sample_job(notify_db, notify_db_session, sample_template.service) assert Notification.query.count() == 0 data = _notification_json(sample_template, job_id=job_1.id) @@ -490,6 +497,12 @@ def 
test_get_notification_with_personalisation_by_id(sample_template): assert notification_from_db.scheduled_notification.scheduled_for == datetime(2017, 5, 5, 14, 15) +def test_get_notification_with_personalisation_by_id_no_result(sample_template, fake_uuid, mocker): + mock_logger = mocker.patch("app.authentication.auth.current_app.logger.warning") + assert get_notification_with_personalisation(sample_template.service.id, fake_uuid, key_type=None) is None + assert mock_logger.called + + def test_get_notification_by_id_when_notification_exists(sample_notification): notification_from_db = get_notification_by_id(sample_notification.id) @@ -559,6 +572,44 @@ def test_get_all_notifications_for_job(sample_job): assert len(notifications_from_db) == 5 +def test_get_latest_sent_notification_for_job_partially_processed_job(sample_job): + one_s = timedelta(seconds=1) + now = datetime.utcnow() + + test_data = [ + (now - 5 * one_s, "sent"), + (now - 4 * one_s, "sent"), + (now - 3 * one_s, "sent"), + (now - 2 * one_s, "pending"), + (now - 1 * one_s, "pending"), + (now, "sent"), + ] + + for updated_at, status in test_data: + save_notification(create_notification(template=sample_job.template, job=sample_job, status=status, updated_at=updated_at)) + + latest_sent_notification = get_latest_sent_notification_for_job(sample_job.id) + assert latest_sent_notification.updated_at == now + + +def test_get_latest_sent_notification_for_job_no_notifications(sample_template): + job = create_job(template=sample_template, notification_count=0, job_status=JOB_STATUS_PENDING) + + latest_sent_notification = get_latest_sent_notification_for_job(job.id) + assert latest_sent_notification is None + + +def test_get_notification_count_for_job(sample_job): + for i in range(0, 7): + try: + save_notification(create_notification(template=sample_job.template, job=sample_job)) + except IntegrityError: + pass + + notification_count_from_db = get_notification_count_for_job(sample_job.service.id, sample_job.id) + assert notification_count_from_db == 7 + + def test_get_all_notifications_for_job_by_status(sample_job): notifications = partial(get_notifications_for_job, sample_job.service.id, sample_job.id) @@ -1073,7 +1124,6 @@ def test_delivery_is_delivery_slow_for_provider_filters_out_notifications_it_sho def test_dao_get_notifications_by_to_field(sample_template): - recipient_to_search_for = { "to_field": "+16502532222", "normalised_to": "+16502532222", @@ -1171,7 +1221,6 @@ def test_dao_get_notifications_by_to_field_escapes( search_term, expected_result_count, ): - for email_address in { "foo%_@example.com", "%%bar@example.com", @@ -1225,7 +1274,6 @@ def test_dao_get_notifications_by_to_field_matches_partial_phone_numbers( sample_template, search_term, ): - notification_1 = save_notification( create_notification( template=sample_template, @@ -1336,7 +1384,6 @@ def test_dao_get_notifications_by_to_field_only_searches_one_notification_type( def test_dao_created_scheduled_notification(sample_notification): - scheduled_notification = ScheduledNotification( notification_id=sample_notification.id, scheduled_for=datetime.strptime("2017-01-05 14:15", "%Y-%m-%d %H:%M"), @@ -1517,7 +1564,6 @@ def test_dao_get_last_notification_added_for_job_id_no_notifications(sample_temp def test_dao_get_last_notification_added_for_job_id_no_job(sample_template, fake_uuid): - assert dao_get_last_notification_added_for_job_id(fake_uuid) is None @@ -1741,7 +1787,10 @@ def test_send_method_stats_by_service(sample_service, sample_organisation): assert 
NotificationHistory.query.count() == 5 - assert send_method_stats_by_service(datetime.utcnow() - timedelta(days=7), datetime.utcnow(),) == [ + assert send_method_stats_by_service( + datetime.utcnow() - timedelta(days=7), + datetime.utcnow(), + ) == [ ( sample_service.id, sample_service.name, @@ -1769,13 +1818,79 @@ def test_send_method_stats_by_service(sample_service, sample_organisation): ) -def test_bulk_insert_notification(sample_template): - assert len(Notification.query.all()) == 0 - n1 = create_notification(sample_template, client_reference="happy") - n1.id = None - n1.status = None - n2 = create_notification(sample_template, client_reference="sad") - n3 = create_notification(sample_template, client_reference="loud") - bulk_insert_notifications([n1, n2, n3]) - all_notifications = get_notifications_for_service(sample_template.service_id).items - assert len(all_notifications) == 3 +class TestBulkInsertNotifications: + def test_bulk_insert_notification(self, sample_template): + assert len(Notification.query.all()) == 0 + n1 = create_notification(sample_template, client_reference="happy") + n1.id = None + n1.status = None + n2 = create_notification(sample_template, client_reference="sad") + n3 = create_notification(sample_template, client_reference="loud") + bulk_insert_notifications([n1, n2, n3]) + all_notifications = get_notifications_for_service(sample_template.service_id).items + assert len(all_notifications) == 3 + + def test_bulk_insert_notification_duplicate_ids(self, sample_template): + assert len(Notification.query.all()) == 0 + n1 = create_notification(sample_template, client_reference="happy") + n2 = create_notification(sample_template, client_reference="sad") + n3 = create_notification(sample_template, client_reference="loud") + n1.id = n2.id + n1.status = n2.status + with pytest.raises(Exception): + bulk_insert_notifications([n1, n2, n3]) + assert len(get_notifications_for_service(sample_template.service_id).items) == 0 + + +class TestResigning: + @pytest.mark.parametrize("resign,chunk_size", [(True, 2), (False, 2), (True, 10), (False, 10)]) + def test_resign_notifications_resigns_or_previews(self, resign, chunk_size, sample_template_with_placeholders): + from app import signer_personalisation + + with set_signer_secret_key(signer_personalisation, ["k1", "k2"]): + initial_notifications = [ + create_notification(sample_template_with_placeholders, personalisation={"Name": "test"}) for _ in range(5) + ] + personalisations = [n.personalisation for n in initial_notifications] + _personalisations = [n._personalisation for n in initial_notifications] + for notification in initial_notifications: + save_notification(notification) + + with set_signer_secret_key(signer_personalisation, ["k2", "k3"]): + resign_notifications(chunk_size=chunk_size, resign=resign) + notifications = [Notification.query.get(n.id) for n in initial_notifications] + assert [n.personalisation for n in notifications] == personalisations # unsigned values are the same + if resign: + for ( + notification, + _personalisation, + ) in zip(notifications, _personalisations): + assert notification._personalisation != _personalisation # signature is different + else: + assert [n._personalisation for n in notifications] == _personalisations # signatures are the same + + def test_resign_notifications_fails_if_cannot_verify_signatures(self, sample_template_with_placeholders): + from app import signer_personalisation + + with set_signer_secret_key(signer_personalisation, ["k1", "k2"]): + initial_notification = 
create_notification(sample_template_with_placeholders, personalisation={"Name": "test"}) + save_notification(initial_notification) + + with set_signer_secret_key(signer_personalisation, ["k3"]): + with pytest.raises(BadSignature): + resign_notifications(chunk_size=10, resign=True) + + def test_resign_notifications_unsafe_resigns_with_new_key(self, sample_template_with_placeholders): + from app import signer_personalisation + + with set_signer_secret_key(signer_personalisation, ["k1", "k2"]): + initial_notification = create_notification(sample_template_with_placeholders, personalisation={"Name": "test"}) + save_notification(initial_notification) + personalisation = initial_notification.personalisation + _personalisation = initial_notification._personalisation + + with set_signer_secret_key(signer_personalisation, ["k3"]): + resign_notifications(chunk_size=10, resign=True, unsafe=True) + notification = Notification.query.get(initial_notification.id) + assert notification.personalisation == personalisation # unsigned value is the same + assert notification._personalisation != _personalisation # signature is different diff --git a/tests/app/dao/notification_dao/test_notification_dao_bounce_rate.py b/tests/app/dao/notification_dao/test_notification_dao_bounce_rate.py new file mode 100644 index 0000000000..036882b827 --- /dev/null +++ b/tests/app/dao/notification_dao/test_notification_dao_bounce_rate.py @@ -0,0 +1,145 @@ +from datetime import datetime, timedelta + +from app.dao.notifications_dao import ( + dao_create_notification, + overall_bounce_rate_for_day, + service_bounce_rate_for_day, + total_hard_bounces_grouped_by_hour, + total_notifications_grouped_by_hour, +) +from app.models import KEY_TYPE_NORMAL, NOTIFICATION_HARD_BOUNCE, Notification + + +def _notification_json(sample_template, job_id=None, id=None, status=None, feedback_type=None): + data = { + "to": "hello@world.com", + "service": sample_template.service, + "service_id": sample_template.service.id, + "template_id": sample_template.id, + "template_version": sample_template.version, + "created_at": datetime.utcnow(), + "billable_units": 1, + "notification_type": sample_template.template_type, + "key_type": KEY_TYPE_NORMAL, + } + if job_id: + data.update({"job_id": job_id}) + if id: + data.update({"id": id}) + if status: + data.update({"status": status}) + if feedback_type: + data.update({"feedback_type": feedback_type}) + return data + + +class TestBounceRate: + def test_bounce_rate_all_service(self, sample_email_template, sample_job): + assert Notification.query.count() == 0 + + data_1 = _notification_json( + sample_email_template, job_id=sample_job.id, status="permanent-failure", feedback_type=NOTIFICATION_HARD_BOUNCE + ) + data_2 = _notification_json(sample_email_template, job_id=sample_job.id, status="created") + + notification_1 = Notification(**data_1) + notification_2 = Notification(**data_2) + dao_create_notification(notification_1) + dao_create_notification(notification_2) + + assert Notification.query.count() == 2 + + result = overall_bounce_rate_for_day(2, datetime.utcnow() + timedelta(minutes=1)) + assert result[0].service_id == sample_email_template.service_id + assert result[0].total_emails == 2 + assert result[0].hard_bounces == 1 + assert result[0].bounce_rate == 50 + + def test_bounce_rate_single_service(self, sample_email_template, sample_job): + assert Notification.query.count() == 0 + + data_1 = _notification_json( + sample_email_template, job_id=sample_job.id, status="permanent-failure", 
feedback_type=NOTIFICATION_HARD_BOUNCE + ) + data_2 = _notification_json(sample_email_template, job_id=sample_job.id, status="created") + + notification_1 = Notification(**data_1) + notification_2 = Notification(**data_2) + dao_create_notification(notification_1) + dao_create_notification(notification_2) + + assert Notification.query.count() == 2 + + result = service_bounce_rate_for_day(sample_email_template.service_id, 2, datetime.utcnow() + timedelta(minutes=1)) + assert result.total_emails == 2 + assert result.hard_bounces == 1 + assert result.bounce_rate == 50 + + def test_bounce_rate_single_service_no_result(self, sample_service_full_permissions, sample_email_template, sample_job): + assert Notification.query.count() == 0 + + data_1 = _notification_json( + sample_email_template, job_id=sample_job.id, status="permanent-failure", feedback_type=NOTIFICATION_HARD_BOUNCE + ) + data_2 = _notification_json(sample_email_template, job_id=sample_job.id, status="created") + + notification_1 = Notification(**data_1) + notification_2 = Notification(**data_2) + dao_create_notification(notification_1) + dao_create_notification(notification_2) + + assert Notification.query.count() == 2 + assert sample_email_template.service_id != sample_service_full_permissions.id + result = service_bounce_rate_for_day(sample_service_full_permissions.id, 2, datetime.utcnow() + timedelta(minutes=1)) + assert result is None + + def test_total_notifications(self, sample_email_template, sample_job): + assert Notification.query.count() == 0 + + data_1 = _notification_json( + sample_email_template, job_id=sample_job.id, status="permanent-failure", feedback_type=NOTIFICATION_HARD_BOUNCE + ) + data_2 = _notification_json(sample_email_template, job_id=sample_job.id, status="created") + + notification_1 = Notification(**data_1) + notification_2 = Notification(**data_2) + dao_create_notification(notification_1) + dao_create_notification(notification_2) + + assert Notification.query.count() == 2 + result = total_notifications_grouped_by_hour(sample_email_template.service_id, datetime.utcnow() + timedelta(minutes=1)) + assert result[0].total_notifications == 2 + assert isinstance(result[0].hour, datetime) + + def test_total_hard_bounces(self, sample_email_template, sample_job): + assert Notification.query.count() == 0 + + data_1 = _notification_json( + sample_email_template, job_id=sample_job.id, status="permanent-failure", feedback_type=NOTIFICATION_HARD_BOUNCE + ) + data_2 = _notification_json(sample_email_template, job_id=sample_job.id, status="created") + + notification_1 = Notification(**data_1) + notification_2 = Notification(**data_2) + dao_create_notification(notification_1) + dao_create_notification(notification_2) + + assert Notification.query.count() == 2 + result = total_hard_bounces_grouped_by_hour(sample_email_template.service_id, datetime.utcnow() + timedelta(minutes=1)) + assert result[0].total_notifications == 1 + assert isinstance(result[0].hour, datetime) + + def test_total_hard_bounces_empty(self, sample_email_template, sample_job): + assert Notification.query.count() == 0 + + data_1 = _notification_json(sample_email_template, job_id=sample_job.id, status="delivered") + data_2 = _notification_json(sample_email_template, job_id=sample_job.id, status="created") + + notification_1 = Notification(**data_1) + notification_2 = Notification(**data_2) + dao_create_notification(notification_1) + dao_create_notification(notification_2) + + assert Notification.query.count() == 2 + result = 
total_hard_bounces_grouped_by_hour(sample_email_template.service_id, datetime.utcnow() + timedelta(minutes=1)) + assert result == [] diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py index 581da5add7..2601522739 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py +++ b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py @@ -1,7 +1,6 @@ from datetime import date, datetime, timedelta import pytest -from flask import current_app from freezegun import freeze_time from app.dao.notifications_dao import ( @@ -99,7 +98,7 @@ def _create_templates(sample_service): @pytest.mark.parametrize("month, delete_run_time", [(4, "2016-04-10 23:40"), (1, "2016-01-11 00:40")]) @pytest.mark.parametrize( "notification_type, expected_sms_count, expected_email_count, expected_letter_count", - [("sms", 7, 10, 10), ("email", 10, 7, 10), ("letter", 10, 10, 7)], + [("sms", 7, 10, 10), ("email", 10, 7, 10)], ) def test_should_delete_notifications_by_type_after_seven_days( sample_service, @@ -111,7 +110,6 @@ def test_should_delete_notifications_by_type_after_seven_days( expected_email_count, expected_letter_count, ): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") email_template, letter_template, sms_template = _create_templates(sample_service) # create one notification a day between 1st and 10th from 11:00 to 19:00 of each type for i in range(1, 11): @@ -157,7 +155,6 @@ def test_should_delete_notifications_by_type_after_seven_days( @freeze_time("2016-01-10 12:00:00.000000") def test_should_not_delete_notification_history(sample_service, mocker): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") with freeze_time("2016-01-01 12:00"): email_template, letter_template, sms_template = _create_templates(sample_service) save_notification(create_notification(template=email_template, status="permanent-failure")) @@ -169,22 +166,13 @@ def test_should_not_delete_notification_history(sample_service, mocker): assert NotificationHistory.query.count() == 1 -@pytest.mark.parametrize("notification_type", ["sms", "email", "letter"]) +@pytest.mark.parametrize("notification_type", ["sms", "email"]) def test_delete_notifications_for_days_of_retention(sample_service, notification_type, mocker): - mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") create_test_data(notification_type, sample_service) assert Notification.query.count() == 9 delete_notifications_older_than_retention_by_type(notification_type) assert Notification.query.count() == 7 assert Notification.query.filter_by(notification_type=notification_type).count() == 1 - if notification_type == "letter": - mock_get_s3.assert_called_with( - bucket_name=current_app.config["LETTERS_PDF_BUCKET_NAME"], - subfolder="{}/NOTIFY.LETTER_REF.D.2.C.C".format(str(datetime.utcnow().date())), - ) - assert mock_get_s3.call_count == 2 - else: - mock_get_s3.assert_not_called() def test_delete_notifications_inserts_notification_history(sample_service): @@ -197,7 +185,6 @@ def test_delete_notifications_inserts_notification_history(sample_service): def test_delete_notifications_updates_notification_history(sample_email_template, mocker): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") notification = save_notification( create_notification(template=sample_email_template, created_at=datetime.utcnow() - timedelta(days=8)) ) @@ -232,7 +219,6 @@ def 
test_delete_notifications_keep_data_for_days_of_retention_is_longer(sample_s def test_delete_notifications_with_test_keys(sample_template, mocker): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") save_notification( create_notification( template=sample_template, @@ -279,18 +265,8 @@ def test_delete_notifications_delete_notification_type_for_default_time_if_no_da assert Notification.query.filter_by(notification_type="email").count() == 1 -def test_delete_notifications_does_try_to_delete_from_s3_when_letter_has_not_been_sent(sample_service, mocker): - mock_get_s3 = mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") - letter_template = create_template(service=sample_service, template_type="letter") - - save_notification(create_notification(template=letter_template, status="sending", reference="LETTER_REF")) - delete_notifications_older_than_retention_by_type("email", qry_limit=1) - mock_get_s3.assert_not_called() - - @freeze_time("2016-01-10 12:00:00.000000") def test_should_not_delete_notification_if_history_does_not_exist(sample_service, mocker): - mocker.patch("app.dao.notifications_dao.get_s3_bucket_objects") mocker.patch("app.dao.notifications_dao.insert_update_notification_history") with freeze_time("2016-01-01 12:00"): email_template, letter_template, sms_template = _create_templates(sample_service) diff --git a/tests/app/dao/test_annual_billing_dao.py b/tests/app/dao/test_annual_billing_dao.py index 46a0e2ff0f..7b5ce59bb3 100644 --- a/tests/app/dao/test_annual_billing_dao.py +++ b/tests/app/dao/test_annual_billing_dao.py @@ -17,7 +17,6 @@ def test_dao_update_free_sms_fragment_limit(notify_db_session, sample_service): def test_create_annual_billing(sample_service): - dao_create_or_update_annual_billing_for_year(sample_service.id, 9999, 2016) free_limit = dao_get_free_sms_fragment_limit_for_year(sample_service.id, 2016) diff --git a/tests/app/dao/test_api_key_dao.py b/tests/app/dao/test_api_key_dao.py index 13b06437ec..eabb4cbdbf 100644 --- a/tests/app/dao/test_api_key_dao.py +++ b/tests/app/dao/test_api_key_dao.py @@ -1,6 +1,7 @@ from datetime import datetime, timedelta import pytest +from itsdangerous import BadSignature from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound @@ -10,9 +11,14 @@ get_model_api_keys, get_unsigned_secret, get_unsigned_secrets, + resign_api_keys, save_model_api_key, + update_compromised_api_key_info, + update_last_used_api_key, ) from app.models import KEY_TYPE_NORMAL, ApiKey +from tests.app.db import create_api_key +from tests.conftest import set_signer_secret_key def test_save_api_key_should_create_new_api_key_and_history(sample_service): @@ -30,6 +36,7 @@ def test_save_api_key_should_create_new_api_key_and_history(sample_service): assert len(all_api_keys) == 1 assert all_api_keys[0] == api_key assert api_key.version == 1 + assert api_key.last_used_timestamp is None all_history = api_key.get_history_model().query.all() assert len(all_history) == 1 @@ -55,6 +62,37 @@ def test_expire_api_key_should_update_the_api_key_and_create_history_record(noti sorted_all_history[1].version = 2 +def test_last_used_should_update_the_api_key_and_not_create_history_record(notify_api, sample_api_key): + last_used = datetime.utcnow() + update_last_used_api_key(api_key_id=sample_api_key.id, last_used=last_used) + all_api_keys = get_model_api_keys(service_id=sample_api_key.service_id) + assert len(all_api_keys) == 1 + assert all_api_keys[0].last_used_timestamp == last_used + + all_history = 
sample_api_key.get_history_model().query.all() + assert len(all_history) == 1 + + +def test_update_compromised_api_key_info_and_create_history_record(notify_api, sample_api_key): + update_compromised_api_key_info( + service_id=sample_api_key.service_id, api_key_id=sample_api_key.id, compromised_info={"key": "value"} + ) + all_api_keys = get_model_api_keys(service_id=sample_api_key.service_id) + assert len(all_api_keys) == 1 + assert all_api_keys[0].secret == sample_api_key.secret + assert all_api_keys[0].id == sample_api_key.id + assert all_api_keys[0].service_id == sample_api_key.service_id + assert all_api_keys[0].compromised_key_info == {"key": "value"} + + all_history = sample_api_key.get_history_model().query.all() + assert len(all_history) == 2 + assert all_history[0].id == sample_api_key.id + assert all_history[1].id == sample_api_key.id + sorted_all_history = sorted(all_history, key=lambda hist: hist.version) + sorted_all_history[0].version = 1 + sorted_all_history[1].version = 2 + + def test_get_api_key_should_raise_exception_when_api_key_does_not_exist(sample_service, fake_uuid): with pytest.raises(NoResultFound): get_model_api_keys(sample_service.id, id=fake_uuid) @@ -78,12 +116,27 @@ def test_get_unsigned_secret_returns_key(sample_api_key): assert unsigned_api_key == sample_api_key.secret -def test_get_api_key_by_secret(sample_api_key): - unsigned_secret = get_unsigned_secret(sample_api_key.id) - assert get_api_key_by_secret(unsigned_secret).id == sample_api_key.id +class TestGetAPIKeyBySecret: + def test_get_api_key_by_secret(self, sample_api_key): + secret = get_unsigned_secret(sample_api_key.id) + # Create token expected from the frontend + unsigned_secret = f"gcntfy-keyname-{sample_api_key.service_id}-{secret}" + assert get_api_key_by_secret(unsigned_secret).id == sample_api_key.id - with pytest.raises(NoResultFound): - get_api_key_by_secret("nope") + with pytest.raises(ValueError): + get_api_key_by_secret("nope") + + # Test getting secret without the keyname prefix + with pytest.raises(ValueError): + get_api_key_by_secret(str(sample_api_key.id)) + + # Test the service_name isnt part of the secret + with pytest.raises(ValueError): + get_api_key_by_secret(f"gcntfy-keyname-hello-{secret}") + + # Test the secret is incorrect + with pytest.raises(NoResultFound): + get_api_key_by_secret(f"gcntfy-keyname-hello-{sample_api_key.service_id}-1234") def test_should_not_allow_duplicate_key_names_per_service(sample_api_key, fake_uuid): @@ -159,3 +212,47 @@ def test_should_not_return_revoked_api_keys_older_than_7_days(sample_service, da all_api_keys = get_model_api_keys(service_id=sample_service.id) assert len(all_api_keys) == expected_length + + +class TestResigning: + @pytest.mark.parametrize("resign", [True, False]) + def test_resign_api_keys_resigns_or_previews(self, resign, sample_service): + from app import signer_api_key + + with set_signer_secret_key(signer_api_key, ["k1", "k2"]): + initial_key = create_api_key(service=sample_service) + secret = initial_key.secret + _secret = initial_key._secret + + with set_signer_secret_key(signer_api_key, ["k2", "k3"]): + resign_api_keys(resign=resign) + api_key = ApiKey.query.get(initial_key.id) + assert api_key.secret == secret # unsigned value is the same + if resign: + assert api_key._secret != _secret # signature is different + else: + assert api_key._secret == _secret # signature is the same + + def test_resign_api_keys_fails_if_cannot_verify_signatures(self, sample_service): + from app import signer_api_key + + with 
set_signer_secret_key(signer_api_key, ["k1", "k2"]): + create_api_key(service=sample_service) + + with set_signer_secret_key(signer_api_key, "k3"): + with pytest.raises(BadSignature): + resign_api_keys(resign=True) + + def test_resign_api_keys_unsafe_resigns_with_new_key(self, sample_service): + from app import signer_api_key + + with set_signer_secret_key(signer_api_key, ["k1", "k2"]): + initial_key = create_api_key(service=sample_service) + secret = initial_key.secret + _secret = initial_key._secret + + with set_signer_secret_key(signer_api_key, ["k3"]): + resign_api_keys(resign=True, unsafe=True) + api_key = ApiKey.query.get(initial_key.id) + assert api_key.secret == secret # unsigned value is the same + assert api_key._secret != _secret # signature is different diff --git a/tests/app/dao/test_date_utils.py b/tests/app/dao/test_date_utils.py index 5818e2f5f9..4286fb8da7 100644 --- a/tests/app/dao/test_date_utils.py +++ b/tests/app/dao/test_date_utils.py @@ -1,11 +1,13 @@ from datetime import date, datetime import pytest +import pytz from app.dao.date_util import ( get_april_fools, get_financial_year, get_financial_year_for_datetime, + get_midnight, get_month_start_and_end_date_in_utc, ) @@ -59,9 +61,9 @@ def test_get_april_fools(): ) def test_get_month_start_and_end_date_in_utc(month, year, expected_start, expected_end): month_year = datetime(year, month, 10, 13, 30, 00) - result = get_month_start_and_end_date_in_utc(month_year) - assert result[0] == expected_start - assert result[1] == expected_end + start, end = get_month_start_and_end_date_in_utc(month_year) + assert start == expected_start + assert end == expected_end @pytest.mark.parametrize( @@ -76,3 +78,53 @@ def test_get_month_start_and_end_date_in_utc(month, year, expected_start, expect ) def test_get_financial_year_for_datetime(dt, fy): assert get_financial_year_for_datetime(dt) == fy + + +class TestMidnightDateTime: + eastern = pytz.timezone("US/Eastern") + utc = pytz.utc + + @pytest.mark.parametrize( + "current_time, expected_midnight", + [ + ( + datetime(2022, 7, 1, 0, 00, 00, tzinfo=utc), + datetime(2022, 7, 1, 0, 00, 00, tzinfo=utc), + ), + ( + datetime(2022, 7, 1, 4, 00, 00, tzinfo=utc), + datetime(2022, 7, 1, 0, 00, 00, tzinfo=utc), + ), + ( + datetime(2022, 7, 1, 23, 59, 59, tzinfo=utc), + datetime(2022, 7, 1, 0, 00, 00, tzinfo=utc), + ), + ( + datetime(2022, 7, 1, 4, 00, 00, tzinfo=utc), + datetime(2022, 7, 1, 0, 00, 00, tzinfo=utc), + ), + ( + datetime(2022, 7, 1, 5, 00, 00, tzinfo=utc), + datetime(2022, 7, 1, 0, 00, 00, tzinfo=utc), + ), + ( + datetime(2022, 7, 1, 20, 00, 00, tzinfo=utc), + datetime(2022, 7, 1, 0, 00, 00, tzinfo=utc), + ), + ( + datetime(2022, 7, 1, 18, 00, 00, tzinfo=utc), + datetime(2022, 7, 1, 0, 00, 00, tzinfo=utc), + ), + ( + datetime(2022, 7, 1, 18, 00, 00, tzinfo=eastern), + datetime(2022, 7, 1, 0, 00, 00, tzinfo=eastern), + ), + ( + datetime(2022, 7, 1, 20, 00, 00, tzinfo=eastern), + datetime(2022, 7, 1, 0, 00, 00, tzinfo=eastern), + ), + ], + ) + def test_get_midnight(self, current_time, expected_midnight): + actual = get_midnight(current_time) + assert expected_midnight == actual diff --git a/tests/app/dao/test_email_branding_dao.py b/tests/app/dao/test_email_branding_dao.py index a69c912577..a3bc948a34 100644 --- a/tests/app/dao/test_email_branding_dao.py +++ b/tests/app/dao/test_email_branding_dao.py @@ -5,11 +5,12 @@ dao_update_email_branding, ) from app.models import EmailBranding -from tests.app.db import create_email_branding +from tests.app.db import create_email_branding, 
create_organisation def test_get_email_branding_options_gets_all_email_branding(notify_db, notify_db_session): - email_branding_1 = create_email_branding(name="test_email_branding_1") + org_1 = create_organisation() + email_branding_1 = create_email_branding(name="test_email_branding_1", organisation_id=org_1.id) email_branding_2 = create_email_branding(name="test_email_branding_2") email_branding = dao_get_email_branding_options() @@ -18,6 +19,13 @@ def test_get_email_branding_options_gets_all_email_branding(notify_db, notify_db assert email_branding_1 == email_branding[0] assert email_branding_2 == email_branding[1] + org_1_id = email_branding_1.organisation_id + + email_branding = dao_get_email_branding_options(filter_by_organisation_id=org_1_id) + assert len(email_branding) == 1 + assert email_branding_1 == email_branding[0] + assert email_branding[0].organisation_id == org_1_id + def test_get_email_branding_by_id_gets_correct_email_branding(notify_db, notify_db_session): email_branding = create_email_branding() diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 8e98fde0d6..34026b596e 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -1,7 +1,7 @@ from datetime import date, datetime, timedelta +from unittest import mock from uuid import UUID -import mock import pytest from freezegun import freeze_time from notifications_utils.timezones import convert_utc_to_local_timezone @@ -51,6 +51,7 @@ create_template, save_notification, ) +from tests.conftest import set_config def test_update_fact_notification_status(notify_db_session): @@ -249,10 +250,9 @@ def test_fetch_notification_status_for_service_for_today_and_7_previous_days( email_template = create_template(service=service_1, template_type=EMAIL_TYPE) create_ft_notification_status(date(2018, 10, 29), "sms", service_1, count=10) - create_ft_notification_status(date(2018, 10, 24), "sms", service_1, count=8) create_ft_notification_status(date(2018, 10, 29), "sms", service_1, notification_status="created") + create_ft_notification_status(date(2018, 10, 24), "sms", service_1, count=8) create_ft_notification_status(date(2018, 10, 29), "email", service_1, count=3) - create_ft_notification_status(date(2018, 10, 26), "letter", service_1, count=5) save_notification(create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0))) save_notification(create_notification(sms_template_2, created_at=datetime(2018, 10, 31, 11, 0, 0))) @@ -263,54 +263,47 @@ def test_fetch_notification_status_for_service_for_today_and_7_previous_days( save_notification( create_notification( service_1.templates[0], - created_at=datetime(2018, 10, 30, 12, 0, 0), + created_at=datetime(2018, 10, 23, 12, 0, 0), status="delivered", ) ) - results = sorted( fetch_notification_status_for_service_for_today_and_7_previous_days(service_1.id), key=lambda x: (x.notification_type, x.status), ) - assert len(results) == 4 + assert len(results) == 3 assert results[0].notification_type == "email" assert results[0].status == "delivered" assert results[0].count == 4 - assert results[1].notification_type == "letter" - assert results[1].status == "delivered" - assert results[1].count == 5 + assert results[1].notification_type == "sms" + assert results[1].status == "created" + assert results[1].count == 3 assert results[2].notification_type == "sms" - assert results[2].status == "created" - assert results[2].count == 3 - - assert 
results[3].notification_type == "sms" - assert results[3].status == "delivered" - assert results[3].count == 11 + assert results[2].status == "delivered" + assert results[2].count == 19 @freeze_time("2018-10-31T18:00:00") # This test assumes the local timezone is EST -def test_fetch_notification_status_by_template_for_service_for_today_and_7_previous_days( - notify_db_session, -): +def test_fetch_notification_status_by_template_for_service_for_today_and_7_previous_days(notify_db_session, notify_api): service_1 = create_service(service_name="service_1") - sms_template = create_template(template_name="sms Template 1", service=service_1, template_type=SMS_TYPE) - sms_template_2 = create_template(template_name="sms Template 2", service=service_1, template_type=SMS_TYPE) - email_template = create_template(service=service_1, template_type=EMAIL_TYPE) + sms_template = create_template(template_name="SMS NON-FT", service=service_1, template_type=SMS_TYPE) + sms_template_2 = create_template(template_name="SMS1 NON-FT", service=service_1, template_type=SMS_TYPE) + email_template = create_template(template_name="EMAIL NON-FT", service=service_1, template_type=EMAIL_TYPE) # create unused email template - create_template(service=service_1, template_type=EMAIL_TYPE) + create_template(template_name="UNUSED", service=service_1, template_type=EMAIL_TYPE) - create_ft_notification_status(date(2018, 10, 29), "sms", service_1, count=10) - create_ft_notification_status(date(2018, 10, 29), "sms", service_1, count=11) + # 30 sms + create_ft_notification_status(date(2018, 10, 29), "sms", service_1, count=10, billable_units=20) + create_ft_notification_status(date(2018, 10, 28), "sms", service_1, count=11, billable_units=11) create_ft_notification_status(date(2018, 10, 24), "sms", service_1, count=8) - create_ft_notification_status(date(2018, 10, 29), "sms", service_1, notification_status="created") + create_ft_notification_status(date(2018, 10, 27), "sms", service_1, notification_status="created") create_ft_notification_status(date(2018, 10, 29), "email", service_1, count=3) - create_ft_notification_status(date(2018, 10, 26), "letter", service_1, count=5) save_notification(create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0))) save_notification(create_notification(sms_template, created_at=datetime(2018, 10, 31, 12, 0, 0), status="delivered")) @@ -321,26 +314,57 @@ def test_fetch_notification_status_by_template_for_service_for_today_and_7_previ save_notification( create_notification( service_1.templates[0], - created_at=datetime(2018, 10, 30, 12, 0, 0), + created_at=datetime(2018, 10, 23, 12, 0, 0), status="delivered", ) ) - results = fetch_notification_status_for_service_for_today_and_7_previous_days(service_1.id, by_template=True) - assert [ - ("email Template Name", False, mock.ANY, "email", "delivered", 1), + ("EMAIL NON-FT", False, mock.ANY, "email", "delivered", 1), ("email Template Name", False, mock.ANY, "email", "delivered", 3), - ("letter Template Name", False, mock.ANY, "letter", "delivered", 5), - ("sms Template 1", False, mock.ANY, "sms", "created", 1), + ("SMS NON-FT", False, mock.ANY, "sms", "created", 1), ("sms Template Name", False, mock.ANY, "sms", "created", 1), - ("sms Template 1", False, mock.ANY, "sms", "delivered", 1), - ("sms Template 2", False, mock.ANY, "sms", "delivered", 1), + ("SMS NON-FT", False, mock.ANY, "sms", "delivered", 1), + ("SMS1 NON-FT", False, mock.ANY, "sms", "delivered", 1), + ("sms Template Name", False, mock.ANY, "sms", "delivered", 8), ("sms 
Template Name", False, mock.ANY, "sms", "delivered", 10), ("sms Template Name", False, mock.ANY, "sms", "delivered", 11), ] == sorted(results, key=lambda x: (x.notification_type, x.status, x.template_name, x.count)) +@freeze_time("2018-10-31T18:00:00") +def test_fetch_notification_status_gets_data_from_correct_timeframe( + notify_db_session, +): + service_1 = create_service(service_name="service_1") + sms_template = create_template(service=service_1, template_type=SMS_TYPE) + email_template = create_template(service=service_1, template_type=EMAIL_TYPE) + + # create notifications for every hour of the day + for i in range(24): + save_notification(create_notification(email_template, created_at=datetime(2018, 10, 30, i, 0, 0), status="delivered")) + save_notification(create_notification(email_template, created_at=datetime(2018, 10, 30, i, 0, 59), status="delivered")) + save_notification(create_notification(sms_template, created_at=datetime(2018, 10, 30, i, 0, 0), status="delivered")) + save_notification(create_notification(sms_template, created_at=datetime(2018, 10, 30, i, 0, 30), status="delivered")) + save_notification(create_notification(sms_template, created_at=datetime(2018, 10, 30, i, 0, 59), status="delivered")) + + # too early, shouldn't be included + save_notification( + create_notification( + service_1.templates[0], + created_at=datetime(2018, 10, 29, 23, 59, 59), + status="delivered", + ) + ) + data = fetch_notification_status_for_day(process_day=datetime.utcnow() - timedelta(days=1)) + + assert data[0].notification_type == "email" + assert data[0].notification_count == 48 + + assert data[1].notification_type == "sms" + assert data[1].notification_count == 72 + + def test_get_total_notifications_sent_for_api_key(notify_db_session): service = create_service(service_name="First Service") api_key = create_api_key(service) @@ -366,24 +390,20 @@ def test_get_total_notifications_sent_for_api_key(notify_db_session): assert dict(api_key_stats_3) == dict([(EMAIL_TYPE, total_sends), (SMS_TYPE, total_sends)]) +def test_get_last_send_for_api_key_check_last_used(notify_db_session): + service = create_service(service_name="First Service") + api_key = create_api_key(service, last_used=datetime.utcnow()) + + last_send = get_last_send_for_api_key(str(api_key.id))[0][0] + assert last_send == api_key.last_used_timestamp + + def test_get_last_send_for_api_key(notify_db_session): service = create_service(service_name="First Service") api_key = create_api_key(service) - template_email = create_template(service=service, template_type=EMAIL_TYPE) - total_sends = 10 - last_send = get_last_send_for_api_key(str(api_key.id)) assert last_send == [] - for x in range(total_sends): - save_notification(create_notification(template=template_email, api_key=api_key)) - - # the following lines test that a send has occurred within the last second - last_send = get_last_send_for_api_key(str(api_key.id))[0][0] - now = datetime.utcnow() - time_delta = now - last_send - assert abs(time_delta.total_seconds()) < 1 - def test_get_api_key_ranked_by_notifications_created(notify_db_session): service = create_service(service_name="Service 1") @@ -425,6 +445,26 @@ def test_get_api_key_ranked_by_notifications_created(notify_db_session): assert int(second_place[8]) == sms_sends +# def test_last_used_for_api_key(notify_db_session): +# service = create_service(service_name="Service 1") +# api_key_1 = create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name="Key 1") +# api_key_2 = create_api_key(service, key_type=KEY_TYPE_NORMAL, 
key_name="Key 2") +# api_key_3 = create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name="Key 3") +# template_email = create_template(service=service, template_type="email") +# create_notification_history(template=template_email, api_key=api_key_1, created_at="2022-03-04") +# save_notification(create_notification(template=template_email, api_key=api_key_1, created_at="2022-03-05")) + +# assert (get_last_send_for_api_key(str(api_key_1.id))[0][0]).strftime("%Y-%m-%d") == "2022-03-05" + +# save_notification(create_notification(template=template_email, api_key=api_key_2, created_at="2022-03-06")) + +# assert (get_last_send_for_api_key(str(api_key_2.id))[0][0]).strftime("%Y-%m-%d") == "2022-03-06" + +# create_notification_history(template=template_email, api_key=api_key_3, created_at="2022-03-07") + +# assert (get_last_send_for_api_key(str(api_key_3.id))[0][0]).strftime("%Y-%m-%d") == "2022-03-07" + + @pytest.mark.parametrize( "start_date, end_date, expected_email, expected_letters, expected_sms, expected_created_sms", [ @@ -722,6 +762,62 @@ def test_fetch_delivered_notification_stats_by_month(sample_service): assert results[3].count == 6 +@freeze_time("2020-11-02 14:00") +def test_fetch_delivered_notification_stats_by_month_filter_heartbeats(notify_api, sample_service): + sms_template = create_template(service=sample_service, template_type="sms", template_name="a") + email_template = create_template(service=sample_service, template_type="email", template_name="b") + + # Not counted: before GC Notify started + create_ft_notification_status( + utc_date=date(2019, 10, 10), + service=sample_service, + template=email_template, + count=3, + ) + + create_ft_notification_status( + utc_date=date(2019, 12, 10), + service=sample_service, + template=email_template, + count=3, + ) + + create_ft_notification_status( + utc_date=date(2019, 12, 5), + service=sample_service, + template=sms_template, + notification_status=NOTIFICATION_DELIVERED, + count=6, + ) + + create_ft_notification_status( + utc_date=date(2020, 1, 1), + service=sample_service, + template=sms_template, + notification_status=NOTIFICATION_SENT, + count=4, + ) + + # Not counted: failed notifications + create_ft_notification_status( + utc_date=date(2020, 1, 1), + service=sample_service, + template=sms_template, + notification_status=NOTIFICATION_FAILED, + count=10, + ) + + create_ft_notification_status( + utc_date=date(2020, 3, 1), + service=sample_service, + template=email_template, + count=5, + ) + with set_config(notify_api, "NOTIFY_SERVICE_ID", email_template.service_id): + results = fetch_delivered_notification_stats_by_month(filter_heartbeats=True) + assert len(results) == 0 + + def test_fetch_delivered_notification_stats_by_month_empty(): assert fetch_delivered_notification_stats_by_month() == [] @@ -1126,3 +1222,36 @@ def test_fetch_monthly_notification_statuses_per_service_for_rows_that_should_be results = fetch_monthly_notification_statuses_per_service(date(2019, 3, 1), date(2019, 3, 31)) assert len(results) == 0 + + +# Freezegun is currently unable of handling non-timezone naive dates: +# https://github.com/spulec/freezegun/issues/89 : https://github.com/spulec/freezegun/issues/487 +# So while the timeframe boundaries we're testing here are 5AM to 5AM UTC across 2 days, because the start/end dates +# are timezone aware our boundaries for the purpose of this test are 23h to 23h. 
+@freeze_time("2018-11-01T18:00:00") +def test_fetch_notification_status_for_service_for_today_handles_midnight_utc( + notify_db_session, +): + service_1 = create_service(service_name="service_1") + email_template = create_template(service=service_1, template_type=EMAIL_TYPE) + + # create notifications that should not be included in today's count + create_ft_notification_status(date(2018, 10, 24), "email", service_1, count=30) + create_ft_notification_status(date(2018, 10, 31), "email", service_1, count=20) + + save_notification(create_notification(email_template, created_at=datetime(2018, 10, 31, 0, 0, 0), status="delivered")) + save_notification(create_notification(email_template, created_at=datetime(2018, 10, 31, 11, 59, 59), status="delivered")) + save_notification(create_notification(email_template, created_at=datetime(2018, 10, 31, 11, 59, 59), status="delivered")) + save_notification(create_notification(email_template, created_at=datetime(2018, 10, 31, 23, 59, 59), status="delivered")) + + # create notifications that should be included in count + save_notification(create_notification(email_template, created_at=datetime(2018, 11, 1, 13, 0, 0), status="delivered")) + save_notification(create_notification(email_template, created_at=datetime(2018, 11, 1, 6, 0, 0), status="delivered")) + save_notification(create_notification(email_template, created_at=datetime(2018, 11, 1, 17, 59, 59), status="delivered")) + + # checking the daily stats for this day should give us the 3 created after 12am UTC + results = sorted( + fetch_notification_status_for_service_for_today_and_7_previous_days(service_1.id, limit_days=1), + key=lambda x: (x.notification_type, x.status), + ) + assert results[0][2] == 3 diff --git a/tests/app/dao/test_inbound_sms_dao.py b/tests/app/dao/test_inbound_sms_dao.py index 8f9b160a7f..68f6759194 100644 --- a/tests/app/dao/test_inbound_sms_dao.py +++ b/tests/app/dao/test_inbound_sms_dao.py @@ -1,7 +1,9 @@ from datetime import datetime from itertools import product +import pytest from freezegun import freeze_time +from itsdangerous import BadSignature from app.dao.inbound_sms_dao import ( dao_count_inbound_sms_for_service, @@ -10,13 +12,15 @@ dao_get_paginated_inbound_sms_for_service_for_public_api, dao_get_paginated_most_recent_inbound_sms_by_user_number_for_service, delete_inbound_sms_older_than_retention, + resign_inbound_sms, ) +from app.models import InboundSms from tests.app.db import ( create_inbound_sms, create_service, create_service_data_retention, ) -from tests.conftest import set_config +from tests.conftest import set_config, set_signer_secret_key def test_get_all_inbound_sms(sample_service): @@ -373,3 +377,47 @@ def test_most_recent_inbound_sms_only_returns_values_within_7_days(sample_servic assert len(res.items) == 1 assert res.items[0].content == "new" + + +class TestResigning: + @pytest.mark.parametrize("resign", [True, False]) + def test_resign_inbound_sms_resigns_or_previews(self, resign, sample_service): + from app import signer_inbound_sms + + with set_signer_secret_key(signer_inbound_sms, ["k1", "k2"]): + initial_sms = create_inbound_sms(service=sample_service) + content = initial_sms.content + _content = initial_sms._content + + with set_signer_secret_key(signer_inbound_sms, ["k2", "k3"]): + resign_inbound_sms(resign=resign) + sms = InboundSms.query.get(initial_sms.id) + assert sms.content == content # unsigned value is the same + if resign: + assert sms._content != _content # signature is different + else: + assert sms._content == _content # signature is 
the same + + def test_resign_inbound_sms_fails_if_cannot_verify_signatures(self, sample_service): + from app import signer_inbound_sms + + with set_signer_secret_key(signer_inbound_sms, ["k1", "k2"]): + create_inbound_sms(service=sample_service) + + with set_signer_secret_key(signer_inbound_sms, "k3"): + with pytest.raises(BadSignature): + resign_inbound_sms(resign=True) + + def test_resign_inbound_sms_unsafe_resigns_with_new_key(self, sample_service): + from app import signer_inbound_sms + + with set_signer_secret_key(signer_inbound_sms, ["k1", "k2"]): + initial_sms = create_inbound_sms(service=sample_service) + content = initial_sms.content + _content = initial_sms._content + + with set_signer_secret_key(signer_inbound_sms, ["k3"]): + resign_inbound_sms(resign=True, unsafe=True) + sms = InboundSms.query.get(initial_sms.id) + assert sms.content == content # unsigned value is the same + assert sms._content != _content # signature is different diff --git a/tests/app/dao/test_invited_user_dao.py b/tests/app/dao/test_invited_user_dao.py index bbb669a271..fef62a740a 100644 --- a/tests/app/dao/test_invited_user_dao.py +++ b/tests/app/dao/test_invited_user_dao.py @@ -13,6 +13,7 @@ save_invited_user, ) from app.models import InvitedUser +from tests.app.conftest import create_sample_invited_user def test_create_invited_user(notify_db, notify_db_session, sample_service): @@ -78,17 +79,15 @@ def test_get_unknown_invited_user_returns_none(notify_db, notify_db_session, sam with pytest.raises(NoResultFound) as e: get_invited_user(sample_service.id, unknown_id) - assert "No row was found for one()" in str(e.value) + assert "No row was found when one was required" in str(e.value) def test_get_invited_users_for_service(notify_db, notify_db_session, sample_service): - from tests.app.conftest import sample_invited_user - invites = [] for i in range(0, 5): email = "invited_user_{}@service.gov.uk".format(i) - invited_user = sample_invited_user(notify_db, notify_db_session, sample_service, email) + invited_user = create_sample_invited_user(notify_db, notify_db_session, sample_service, email) invites.append(invited_user) all_from_db = get_invited_users_for_service(sample_service.id) diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index d83cebc2c4..58e3007739 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -17,10 +17,12 @@ dao_set_scheduled_jobs_to_pending, dao_update_job, ) +from app.dao.service_data_retention_dao import insert_service_data_retention from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE, Job from tests.app.db import ( create_job, create_notification, + create_notification_history, create_service, create_template, save_notification, @@ -61,6 +63,26 @@ def test_should_return_notifications_only_for_this_job(sample_template): assert {row.status: row.count for row in results} == {"created": 1} +def test_get_notification_outcomes_should_return_history_rows(sample_template): + job_1 = create_job(sample_template) + + save_notification(create_notification_history(sample_template, job=job_1, status="created")) + save_notification(create_notification_history(sample_template, job=job_1, status="sent")) + + results = dao_get_notification_outcomes_for_job(sample_template.service_id, job_1.id) + assert {row.status: row.count for row in results} == {"created": 1, "sent": 1} + + +def test_get_notification_outcomes_should_return_history_and_non_history_rows(sample_template): + job_1 = create_job(sample_template) + + 
save_notification(create_notification_history(sample_template, job=job_1, status="sent")) + save_notification(create_notification(sample_template, job=job_1, status="created")) + + results = dao_get_notification_outcomes_for_job(sample_template.service_id, job_1.id) + assert {row.status: row.count for row in results} == {"created": 1, "sent": 1} + + def test_should_return_notifications_only_for_this_service( sample_notification_with_job, ): @@ -327,6 +349,42 @@ def test_should_get_jobs_seven_days_old_by_scheduled_for_date(sample_service): assert job_to_remain.id not in [job.id for job in jobs] +@freeze_time("2016-10-31 10:00:00") +def test_should_get_limited_number_of_jobs(sample_template): + flexible_retention_service1 = create_service(service_name="Another service 1") + insert_service_data_retention(flexible_retention_service1.id, sample_template.template_type, 3) + flexible_template1 = create_template(flexible_retention_service1, template_type=sample_template.template_type) + + flexible_retention_service2 = create_service(service_name="Another service 2") + insert_service_data_retention(flexible_retention_service2.id, sample_template.template_type, 2) + flexible_template2 = create_template(flexible_retention_service2, template_type=sample_template.template_type) + + eight_days_ago = datetime.utcnow() - timedelta(days=8) + four_days_ago = datetime.utcnow() - timedelta(days=4) + + for _ in range(4): + create_job(flexible_template1, created_at=four_days_ago) + create_job(flexible_template2, created_at=four_days_ago) + create_job(sample_template, created_at=eight_days_ago) + + jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type], limit=3) + + assert len(jobs) == 3 + + +@freeze_time("2016-10-31 10:00:00") +def test_should_get_not_get_limited_number_of_jobs_by_default(sample_template): + eight_days_ago = datetime.utcnow() - timedelta(days=8) + + create_job(sample_template, created_at=eight_days_ago) + create_job(sample_template, created_at=eight_days_ago) + create_job(sample_template, created_at=eight_days_ago) + + jobs = dao_get_jobs_older_than_data_retention(notification_types=[sample_template.template_type]) + + assert len(jobs) == 3 + + def assert_job_stat(job, result, sent, delivered, failed): assert result.job_id == job.id assert result.original_file_name == job.original_file_name diff --git a/tests/app/dao/test_organisation_dao.py b/tests/app/dao/test_organisation_dao.py index 9c891a3846..3b2ddb99f6 100644 --- a/tests/app/dao/test_organisation_dao.py +++ b/tests/app/dao/test_organisation_dao.py @@ -272,7 +272,6 @@ def test_add_user_to_organisation_when_organisation_does_not_exist(sample_user): ), ) def test_get_organisation_by_email_address(domain, expected_org, notify_db_session): - org = create_organisation() create_domain("example.gov.uk", org.id) create_domain("test.gov.uk", org.id) diff --git a/tests/app/dao/test_permissions_dao.py b/tests/app/dao/test_permissions_dao.py index 5c1ead3bd6..02bbfbf567 100644 --- a/tests/app/dao/test_permissions_dao.py +++ b/tests/app/dao/test_permissions_dao.py @@ -1,30 +1,27 @@ from app.dao.permissions_dao import permission_dao -from tests.app.conftest import sample_service as create_service +from tests.app.conftest import create_sample_service def test_get_permissions_by_user_id_returns_all_permissions(sample_service): permissions = permission_dao.get_permissions_by_user_id(user_id=sample_service.users[0].id) assert len(permissions) == 8 - assert ( - sorted( - [ - "manage_users", - "manage_templates", - 
"manage_settings", - "send_texts", - "send_emails", - "send_letters", - "manage_api_keys", - "view_activity", - ] - ) - == sorted([i.permission for i in permissions]) - ) + assert sorted( + [ + "manage_users", + "manage_templates", + "manage_settings", + "send_texts", + "send_emails", + "send_letters", + "manage_api_keys", + "view_activity", + ] + ) == sorted([i.permission for i in permissions]) def test_get_permissions_by_user_id_returns_only_active_service(notify_db, notify_db_session, sample_user): - active_service = create_service(notify_db, notify_db_session, service_name="Active service", user=sample_user) - inactive_service = create_service(notify_db, notify_db_session, service_name="Inactive service", user=sample_user) + active_service = create_sample_service(notify_db, notify_db_session, service_name="Active service", user=sample_user) + inactive_service = create_sample_service(notify_db, notify_db_session, service_name="Inactive service", user=sample_user) inactive_service.active = False permissions = permission_dao.get_permissions_by_user_id(user_id=sample_user.id) assert len(permissions) == 8 diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py index 5b8b8e5348..a2d73afdd9 100644 --- a/tests/app/dao/test_provider_details_dao.py +++ b/tests/app/dao/test_provider_details_dao.py @@ -29,7 +29,7 @@ def test_can_get_sms_non_international_providers(restore_provider_details): def test_can_get_sms_international_providers(restore_provider_details): sms_providers = get_provider_details_by_notification_type("sms", True) - assert len(sms_providers) == 1 + assert len(sms_providers) == 3 assert all("sms" == prov.notification_type for prov in sms_providers) assert all(prov.supports_international for prov in sms_providers) @@ -241,9 +241,14 @@ def test_get_sms_provider_with_equal_priority_returns_provider( def test_get_current_sms_provider_returns_active_only(restore_provider_details): + # Note that we currently have two active sms providers: sns and pinpoint. 
current_provider = get_current_provider("sms") current_provider.active = False dao_update_provider_details(current_provider) + current_provider = get_current_provider("sms") + current_provider.active = False + dao_update_provider_details(current_provider) + new_current_provider = get_current_provider("sms") assert new_current_provider is None @@ -286,7 +291,7 @@ def test_dao_get_provider_stats(notify_db_session): assert result[1].identifier == "sns" assert result[1].display_name == "AWS SNS" - assert result[1].supports_international is False + assert result[1].supports_international is True assert result[1].active is True assert result[1].current_month_billable_sms == 4 @@ -307,6 +312,6 @@ def test_dao_get_provider_stats(notify_db_session): assert result[5].identifier == "pinpoint" assert result[5].notification_type == "sms" - assert result[5].supports_international is False - assert result[5].active is False + assert result[5].supports_international is True + assert result[5].active is True assert result[5].current_month_billable_sms == 0 diff --git a/tests/app/dao/test_service_callback_api_dao.py b/tests/app/dao/test_service_callback_api_dao.py index b92fb96e11..e55610d1fc 100644 --- a/tests/app/dao/test_service_callback_api_dao.py +++ b/tests/app/dao/test_service_callback_api_dao.py @@ -1,17 +1,20 @@ import uuid import pytest +from itsdangerous import BadSignature from sqlalchemy.exc import SQLAlchemyError -from app import encryption +from app import signer_bearer_token from app.dao.service_callback_api_dao import ( get_service_callback_api, get_service_delivery_status_callback_api_for_service, reset_service_callback_api, + resign_service_callbacks, save_service_callback_api, ) from app.models import ServiceCallbackApi from tests.app.db import create_service_callback_api +from tests.conftest import set_signer_secret_key def test_save_service_callback_api(sample_service): @@ -40,7 +43,7 @@ def test_save_service_callback_api(sample_service): assert versioned.service_id == sample_service.id assert versioned.updated_by_id == sample_service.users[0].id assert versioned.url == "https://some_service/callback_endpoint" - assert encryption.decrypt(versioned._bearer_token) == "some_unique_string" + assert signer_bearer_token.verify(versioned._bearer_token) == "some_unique_string" assert versioned.updated_at is None assert versioned.version == 1 @@ -143,7 +146,7 @@ def test_update_service_callback_api(sample_service): assert x.id is not None assert x.service_id == sample_service.id assert x.updated_by_id == sample_service.users[0].id - assert encryption.decrypt(x._bearer_token) == "some_unique_string" + assert signer_bearer_token.verify(x._bearer_token) == "some_unique_string" def test_get_service_callback_api(sample_service): @@ -174,3 +177,47 @@ def test_get_service_delivery_status_callback_api_for_service(sample_service): assert result.created_at == service_callback_api.created_at assert result.updated_at == service_callback_api.updated_at assert result.updated_by_id == service_callback_api.updated_by_id + + +class TestResigning: + @pytest.mark.parametrize("resign", [True, False]) + def test_resign_callbacks_resigns_or_previews(self, resign, sample_service): + from app import signer_bearer_token + + with set_signer_secret_key(signer_bearer_token, ["k1", "k2"]): + initial_callback = create_service_callback_api(service=sample_service) + bearer_token = initial_callback.bearer_token + _bearer_token = initial_callback._bearer_token + + with set_signer_secret_key(signer_bearer_token, ["k2", "k3"]): + 
resign_service_callbacks(resign=resign) + callback = ServiceCallbackApi.query.get(initial_callback.id) + assert callback.bearer_token == bearer_token # unsigned value is the same + if resign: + assert callback._bearer_token != _bearer_token # signature is different + else: + assert callback._bearer_token == _bearer_token # signature is the same + + def test_resign_callbacks_fails_if_cannot_verify_signatures(self, sample_service): + from app import signer_bearer_token + + with set_signer_secret_key(signer_bearer_token, ["k1", "k2"]): + create_service_callback_api(service=sample_service) + + with set_signer_secret_key(signer_bearer_token, ["k3"]): + with pytest.raises(BadSignature): + resign_service_callbacks(resign=True) + + def test_resign_callbacks_unsafe_resigns_with_new_key(self, sample_service): + from app import signer_bearer_token + + with set_signer_secret_key(signer_bearer_token, ["k1", "k2"]): + initial_callback = create_service_callback_api(service=sample_service) + bearer_token = initial_callback.bearer_token + _bearer_token = initial_callback._bearer_token + + with set_signer_secret_key(signer_bearer_token, ["k3"]): + resign_service_callbacks(resign=True, unsafe=True) + callback = ServiceCallbackApi.query.get(initial_callback.id) + assert callback.bearer_token == bearer_token # unsigned value is the same + assert callback._bearer_token != _bearer_token # signature is different diff --git a/tests/app/dao/test_service_email_reply_to_dao.py b/tests/app/dao/test_service_email_reply_to_dao.py index 815b1cf667..db99567eea 100644 --- a/tests/app/dao/test_service_email_reply_to_dao.py +++ b/tests/app/dao/test_service_email_reply_to_dao.py @@ -256,14 +256,25 @@ def test_archive_reply_to_email_address_does_not_archive_a_reply_to_for_a_differ assert not reply_to.archived -def test_archive_reply_to_email_address_raises_an_error_if_attempting_to_archive_a_default( +def test_archive_reply_to_email_address_if_default_and_no_other_addresses_exist( sample_service, ): - create_reply_to_email(service=sample_service, email_address="first@address.com", is_default=False) default_reply_to = create_reply_to_email(service=sample_service, email_address="first@address.com") + archive_reply_to_email_address(sample_service.id, default_reply_to.id) + + assert default_reply_to.archived is True + assert default_reply_to.updated_at is not None + + +def test_archive_reply_to_email_address_raises_an_error_if_default_and_other_addresses_exist( + sample_service, +): + default_reply_to = create_reply_to_email(service=sample_service, email_address="first@address.com") + create_reply_to_email(service=sample_service, email_address="second@address.com", is_default=False) + with pytest.raises(ArchiveValidationError) as e: archive_reply_to_email_address(sample_service.id, default_reply_to.id) - assert "You cannot delete a default email reply to address" in str(e.value) + assert "You cannot delete a default email reply to address if other reply to addresses exist" in str(e.value) assert not default_reply_to.archived diff --git a/tests/app/dao/test_service_inbound_api_dao.py b/tests/app/dao/test_service_inbound_api_dao.py index ce95ddc2b0..9920516d4e 100644 --- a/tests/app/dao/test_service_inbound_api_dao.py +++ b/tests/app/dao/test_service_inbound_api_dao.py @@ -3,7 +3,7 @@ import pytest from sqlalchemy.exc import SQLAlchemyError -from app import encryption +from app import signer_bearer_token from app.dao.service_inbound_api_dao import ( get_service_inbound_api, get_service_inbound_api_for_service, @@ -40,7 +40,7 @@ def 
test_save_service_inbound_api(sample_service): assert versioned.service_id == sample_service.id assert versioned.updated_by_id == sample_service.users[0].id assert versioned.url == "https://some_service/inbound_messages" - assert encryption.decrypt(versioned._bearer_token) == "some_unique_string" + assert signer_bearer_token.verify(versioned._bearer_token) == "some_unique_string" assert versioned.updated_at is None assert versioned.version == 1 @@ -100,7 +100,7 @@ def test_update_service_inbound_api(sample_service): assert x.id is not None assert x.service_id == sample_service.id assert x.updated_by_id == sample_service.users[0].id - assert encryption.decrypt(x._bearer_token) == "some_unique_string" + assert signer_bearer_token.verify(x._bearer_token) == "some_unique_string" def test_get_service_inbound_api(sample_service): diff --git a/tests/app/dao/test_service_whitelist_dao.py b/tests/app/dao/test_service_whitelist_dao.py index 54363ba15b..666e6848ef 100644 --- a/tests/app/dao/test_service_whitelist_dao.py +++ b/tests/app/dao/test_service_whitelist_dao.py @@ -6,7 +6,7 @@ dao_remove_service_safelist, ) from app.models import EMAIL_TYPE, ServiceSafelist -from tests.app.conftest import sample_service as create_service +from tests.app.conftest import create_sample_service def test_fetch_service_safelist_gets_safelists(sample_service_safelist): @@ -30,8 +30,8 @@ def test_add_and_commit_safelisted_contacts_saves_data(sample_service): def test_remove_service_safelist_only_removes_for_my_service(notify_db, notify_db_session): - service_1 = create_service(notify_db, notify_db_session, service_name="service 1") - service_2 = create_service(notify_db, notify_db_session, service_name="service 2") + service_1 = create_sample_service(notify_db, notify_db_session, service_name="service 1") + service_2 = create_sample_service(notify_db, notify_db_session, service_name="service 2") dao_add_and_commit_safelisted_contacts( [ ServiceSafelist.from_string(service_1.id, EMAIL_TYPE, "service1@example.com"), diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index daac8d79f1..ef57322c9c 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -1,6 +1,6 @@ import json import uuid -from datetime import datetime +from datetime import datetime, timedelta import pytest from freezegun import freeze_time @@ -37,13 +37,16 @@ dao_suspend_service, dao_update_service, delete_service_and_all_associated_db_objects, + fetch_service_email_limit, fetch_todays_total_message_count, + fetch_todays_total_sms_count, get_services_by_partial_name, ) from app.dao.users_dao import create_user_code, save_model_user from app.models import ( EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, + JOB_STATUS_SCHEDULED, KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, @@ -66,6 +69,7 @@ user_folder_permissions, ) from app.schemas import service_schema +from tests.app.conftest import create_sample_job from tests.app.db import ( create_annual_billing, create_api_key, @@ -85,6 +89,7 @@ create_user, save_notification, ) +from tests.conftest import set_config # from unittest import mock @@ -102,6 +107,7 @@ def test_create_service(notify_db_session): name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, organisation_type="central", created_by=user, @@ -134,6 +140,7 @@ def test_create_service_with_organisation(notify_db_session): name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, 
organisation_type="central", created_by=user, @@ -194,6 +201,7 @@ def test_create_nhs_service_get_default_branding_based_on_email_address( name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, organisation_type=organisation_type, created_by=user, @@ -216,6 +224,7 @@ def test_cannot_create_two_services_with_same_name(notify_db_session): name="service_name", email_from="email_from1", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -224,6 +233,7 @@ def test_cannot_create_two_services_with_same_name(notify_db_session): name="service_name", email_from="email_from2", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -240,6 +250,7 @@ def test_cannot_create_two_services_with_same_email_from(notify_db_session): name="service_name1", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -247,6 +258,7 @@ def test_cannot_create_two_services_with_same_email_from(notify_db_session): name="service_name2", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -263,6 +275,7 @@ def test_cannot_create_service_with_no_user(notify_db_session): name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -277,6 +290,7 @@ def test_should_add_user_to_service(notify_db_session): name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -347,6 +361,7 @@ def test_should_remove_user_from_service(notify_db_session): name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -481,7 +496,8 @@ def test_get_all_user_services_should_return_empty_list_if_no_services_for_user( @freeze_time("2019-04-23T10:00:00") -def test_dao_fetch_live_services_data(sample_user): +@pytest.mark.parametrize("filter_heartbeats", [True, False]) +def test_dao_fetch_live_services_data_filter_heartbeats(notify_api, sample_user, filter_heartbeats): org = create_organisation(organisation_type="nhs_central") service = create_service(go_live_user=sample_user, go_live_at="2014-04-20T10:00:00") template = create_template(service=service) @@ -549,8 +565,12 @@ def test_dao_fetch_live_services_data(sample_user): # 3rd service: billing from 2019 create_annual_billing(service_3.id, 200, 2019) - results = dao_fetch_live_services_data() - assert len(results) == 3 + with set_config(notify_api, "NOTIFY_SERVICE_ID", template.service_id): + results = dao_fetch_live_services_data(filter_heartbeats=filter_heartbeats) + if not filter_heartbeats: + assert len(results) == 3 + else: + assert len(results) == 2 # checks the results and that they are ordered by date: # @todo: this test is temporarily forced to pass until we can add the fiscal year back into # the query and create a new endpoint for the homepage stats @@ -579,7 +599,7 @@ def test_dao_fetch_live_services_data(sample_user): def test_get_service_by_id_returns_none_if_no_service(notify_db): with pytest.raises(NoResultFound) as e: dao_fetch_service_by_id(str(uuid.uuid4())) - assert "No row was found for one()" in str(e) + assert "No row was found when one was required" in str(e) def test_get_service_by_id_returns_service(notify_db_session): @@ -589,7 +609,7 @@ def test_get_service_by_id_returns_service(notify_db_session): def 
test_get_service_by_id_uses_redis_cache_when_use_cache_specified(notify_db_session, mocker): sample_service = create_service(service_name="testing", email_from="testing") - service_json = {"data": service_schema.dump(sample_service).data} + service_json = {"data": service_schema.dump(sample_service)} service_json["data"]["all_template_folders"] = ["b5035a31-b1da-42f8-b2b8-ce2acaa0b819"] service_json["data"]["annual_billing"] = ["8676fa80-a97b-43e7-8318-ee905de2d652", "a0751f79-984b-4d9e-9edd-42457fd458e9"] @@ -610,7 +630,7 @@ def test_get_service_by_id_uses_redis_cache_when_use_cache_specified(notify_db_s service = dao_fetch_service_by_id(sample_service.id, use_cache=True) assert mocked_redis_get.called - assert str(sample_service.id) == service[0].id + assert str(sample_service.id) == service.id def test_create_service_returns_service_with_default_permissions(notify_db_session): @@ -700,6 +720,7 @@ def test_create_service_creates_a_history_record_with_current_data(notify_db_ses name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -726,6 +747,7 @@ def test_update_service_creates_a_history_record_with_current_data(notify_db_ses name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -759,6 +781,7 @@ def test_update_service_permission_creates_a_history_record_with_current_data( name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -807,6 +830,7 @@ def test_create_service_and_history_is_transactional(notify_db_session): name=None, email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -814,7 +838,7 @@ def test_create_service_and_history_is_transactional(notify_db_session): with pytest.raises(IntegrityError) as excinfo: dao_create_service(service, user) - assert 'column "name" of relation "services_history" violates not-null constraint' in str(excinfo.value) + assert 'null value in column "name" violates not-null constraint' in str(excinfo.value) assert Service.query.count() == 0 assert Service.get_history_model().query.count() == 0 @@ -863,6 +887,7 @@ def test_add_existing_user_to_another_service_doesnot_change_old_permissions( name="service_one", email_from="service_one", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=user, ) @@ -883,6 +908,7 @@ def test_add_existing_user_to_another_service_doesnot_change_old_permissions( name="service_two", email_from="service_two", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=other_user, ) @@ -919,6 +945,7 @@ def test_fetch_stats_filters_on_service(notify_db_session): email_from="hello", restricted=False, message_limit=1000, + sms_daily_limit=1000, ) dao_create_service(service_two, service_one.created_by) @@ -936,7 +963,7 @@ def test_fetch_stats_ignores_historical_notification_data(sample_template): assert len(stats) == 0 -def test_fetch_stats_counts_correctly(notify_db_session): +def test_fetch_stats_counts_correctly(notify_db_session, notify_api): service = create_service() sms_template = create_template(service=service) email_template = create_template(service=service, template_type="email") @@ -944,7 +971,7 @@ def test_fetch_stats_counts_correctly(notify_db_session): save_notification(create_notification(template=email_template, status="created")) save_notification(create_notification(template=email_template, 
status="created")) save_notification(create_notification(template=email_template, status="technical-failure")) - save_notification(create_notification(template=sms_template, status="created")) + save_notification(create_notification(template=sms_template, status="created", billable_units=10)) stats = dao_fetch_stats_for_service(sms_template.service_id, 7) stats = sorted(stats, key=lambda x: (x.notification_type, x.status)) @@ -1035,15 +1062,52 @@ def test_fetch_stats_should_not_gather_notifications_older_than_7_days(sample_te assert len(stats) == rows_returned -def test_dao_fetch_todays_total_message_count_returns_count_for_today( - notify_db_session, -): - notification = save_notification(create_notification(template=create_template(service=create_service()))) - assert fetch_todays_total_message_count(notification.service.id) == 1 +@pytest.mark.usefixtures("notify_db_session") +class TestFetchTotalMessageCount: + def test_dao_fetch_todays_total_message_count_returns_count_for_today(self): + notification = save_notification(create_notification(template=create_template(service=create_service()))) + assert fetch_todays_total_message_count(notification.service.id) == 1 + def test_dao_fetch_todays_total_message_count_returns_0_when_no_messages_for_today(self): + assert fetch_todays_total_message_count(uuid.uuid4()) == 0 -def test_dao_fetch_todays_total_message_count_returns_0_when_no_messages_for_today(notify_db, notify_db_session): - assert fetch_todays_total_message_count(uuid.uuid4()) == 0 + def test_dao_fetch_todays_total_message_count_returns_0_with_yesterday_messages(self): + today = datetime.utcnow().date() + yesterday = today - timedelta(days=1) + notification = save_notification( + create_notification(created_at=yesterday, template=create_template(service=create_service())) + ) + assert fetch_todays_total_message_count(notification.service.id) == 0 + + +@pytest.mark.usefixtures("notify_db_session") +class TestFetchTodaysTotalSmsCount: + def test_returns_count_for_today(self): + service = create_service() + sms_template = create_template(service=service, template_type=SMS_TYPE) + save_notification(create_notification(template=sms_template)) + save_notification(create_notification(template=sms_template)) + assert fetch_todays_total_sms_count(service.id) == 2 + + def test_only_counts_sms(self): + service = create_service() + sms_template = create_template(service=service, template_type=SMS_TYPE) + email_template = create_template(service=service, template_type=EMAIL_TYPE) + save_notification(create_notification(template=sms_template)) + save_notification(create_notification(template=sms_template)) + save_notification(create_notification(template=email_template)) + assert fetch_todays_total_sms_count(service.id) == 2 + + def test_returns_0_when_no_messages_for_today(self): + assert fetch_todays_total_sms_count(uuid.uuid4()) == 0 + + def test_returns_0_with_yesterday_messages(self): + service = create_service() + sms_template = create_template(service=service, template_type=SMS_TYPE) + today = datetime.utcnow().date() + yesterday = today - timedelta(days=1) + save_notification(create_notification(created_at=yesterday, template=sms_template)) + assert fetch_todays_total_sms_count(service.id) == 0 def test_dao_fetch_todays_stats_for_all_services_includes_all_services( @@ -1286,6 +1350,7 @@ def test_dao_fetch_service_creator(notify_db_session): name="service_name", email_from="email_from", message_limit=1000, + sms_daily_limit=1000, restricted=False, created_by=active_user_1, ) @@ -1336,3 
+1401,137 @@ def create_email_sms_letter_template(): template_two = create_template(service=service, template_name="2", template_type="sms") template_three = create_template(service=service, template_name="3", template_type="letter") return template_one, template_three, template_two + + +class TestServiceEmailLimits: + def test_get_email_count_for_service(self): + active_user_1 = create_user(email="active1@foo.com", state="active") + service = Service( + name="service_name", + email_from="email_from", + message_limit=1000, + sms_daily_limit=1000, + restricted=False, + created_by=active_user_1, + ) + dao_create_service( + service, + active_user_1, + service_permissions=[ + SMS_TYPE, + EMAIL_TYPE, + INTERNATIONAL_SMS_TYPE, + ], + ) + assert fetch_service_email_limit(service.id) == 1000 + + def test_dao_fetch_todays_total_message_count_returns_count_for_today(self): + service = create_service() + email_template = create_template(service=service, template_type="email") + save_notification(create_notification(template=email_template, status="created")) + assert fetch_todays_total_message_count(service.id) == 1 + + def test_dao_fetch_todays_total_message_count_returns_0_when_no_messages_for_today(self): + assert fetch_todays_total_message_count(uuid.uuid4()) == 0 + + def test_dao_fetch_todays_total_message_count_returns_0_with_yesterday_messages(self): + today = datetime.utcnow().date() + yesterday = today - timedelta(days=1) + notification = save_notification( + create_notification( + created_at=yesterday, + template=create_template(service=create_service(service_name="tester123"), template_type="email"), + ) + ) + assert fetch_todays_total_message_count(notification.service.id) == 0 + + def test_dao_fetch_todays_total_message_count_counts_notifications_in_jobs_scheduled_for_today( + self, notify_db, notify_db_session + ): + service = create_service(service_name="tester12") + template = create_template(service=service, template_type="email") + today = datetime.utcnow().date() + + create_sample_job( + notify_db, + notify_db_session, + service=service, + template=template, + scheduled_for=today, + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + save_notification( + create_notification( + created_at=today, + template=template, + ) + ) + assert fetch_todays_total_message_count(service.id) == 11 + + def test_dao_fetch_todays_total_message_count_counts_notifications_in_jobs_scheduled_for_today_but_not_after_today( + self, notify_db, notify_db_session + ): + service = create_service() + template = create_template(service=service, template_type="email") + today = datetime.utcnow().date() + + create_sample_job( + notify_db, + notify_db_session, + service=service, + template=template, + scheduled_for=today, + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + save_notification( + create_notification( + created_at=today, + template=template, + ) + ) + create_sample_job( + notify_db, + notify_db_session, + service=service, + template=template, + scheduled_for=today + timedelta(days=1), + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + + assert fetch_todays_total_message_count(service.id) == 11 + + def test_dao_fetch_todays_total_message_count_counts_notifications_in_jobs_scheduled_for_today_but_not_before_today( + self, notify_db, notify_db_session + ): + service = create_service() + template = create_template(service=service, template_type="email") + today = datetime.utcnow().date() + + create_sample_job( + notify_db, + notify_db_session, + service=service, + 
template=template, + scheduled_for=today, + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + create_sample_job( + notify_db, + notify_db_session, + service=service, + template=template, + scheduled_for=today - timedelta(days=1), + job_status=JOB_STATUS_SCHEDULED, + notification_count=10, + ) + save_notification( + create_notification( + created_at=today, + template=template, + ) + ) + assert fetch_todays_total_message_count(service.id) == 11 diff --git a/tests/app/dao/test_template_categories_dao.py b/tests/app/dao/test_template_categories_dao.py new file mode 100644 index 0000000000..3ae33d1561 --- /dev/null +++ b/tests/app/dao/test_template_categories_dao.py @@ -0,0 +1,393 @@ +import pytest +from flask import current_app + +from app.dao.template_categories_dao import ( + dao_create_template_category, + dao_delete_template_category_by_id, + dao_get_all_template_categories, + dao_get_template_category_by_id, + dao_get_template_category_by_template_id, + dao_update_template_category, +) +from app.dao.templates_dao import dao_create_template +from app.errors import InvalidRequest +from app.models import BULK, NORMAL, Template, TemplateCategory +from tests.app.conftest import create_sample_template + + +class TestCreateTemplateCategory: + def test_create_template_category(self, notify_db_session): + data = { + "name_en": "english", + "name_fr": "french", + "description_en": "english description", + "description_fr": "french description", + "sms_process_type": NORMAL, + "email_process_type": NORMAL, + "hidden": False, + "sms_sending_vehicle": "short_code", + } + + template_category = TemplateCategory(**data) + dao_create_template_category(template_category) + + temp_cat = dao_get_all_template_categories() + assert TemplateCategory.query.count() == 1 + assert len(temp_cat) == 1 + assert temp_cat[0].sms_sending_vehicle == "short_code" + + def test_create_template_category_with_no_sms_sending_vehicle(self, notify_db_session): + data = { + "name_en": "english", + "name_fr": "french", + "description_en": "english description", + "description_fr": "french description", + "sms_process_type": NORMAL, + "email_process_type": NORMAL, + "hidden": False, + } + + template_category = TemplateCategory(**data) + dao_create_template_category(template_category) + + temp_cat = dao_get_all_template_categories() + assert TemplateCategory.query.count() == 1 + assert len(temp_cat) == 1 + assert temp_cat[0].sms_sending_vehicle == "long_code" # default value + + +@pytest.mark.parametrize( + "category, updated_category", + [ + ( + { + "name_en": "english", + "name_fr": "french", + "description_en": "english description", + "description_fr": "french description", + "sms_process_type": NORMAL, + "email_process_type": NORMAL, + "hidden": False, + }, + { + "name_en": "new english", + "name_fr": "new french", + "description_en": "new english description", + "description_fr": "new french description", + "sms_process_type": BULK, + "email_process_type": BULK, + "hidden": True, + }, + ) + ], +) +def test_update_template_category(notify_db_session, category, updated_category): + template_category = TemplateCategory(**category) + dao_create_template_category(template_category) + + for key, value in updated_category.items(): + setattr(template_category, key, value) + + dao_update_template_category(template_category) + + fetched_category = dao_get_all_template_categories()[0] + + assert fetched_category.id == template_category.id + for key, value in updated_category.items(): + assert getattr(fetched_category, key) 
== value + + +@pytest.mark.parametrize( + "category, template", + [ + ( + { + "name_en": "english", + "name_fr": "french", + "description_en": "english description", + "description_fr": "french description", + "sms_process_type": NORMAL, + "email_process_type": NORMAL, + "hidden": False, + }, + { + "name": "Sample Template", + "template_type": "email", + "content": "Template content", + }, + ) + ], +) +def test_dao_get_template_category_by_template_id(category, template, notify_db_session, sample_service, sample_user): + template_category = TemplateCategory(**category) + dao_create_template_category(template_category) + + template = Template(**template) + template.service = sample_service + template.created_by = sample_user + template.template_category = template_category + dao_create_template(template) + + assert dao_get_template_category_by_template_id(template.id) == template_category + + +def test_get_template_category_by_id(notify_db_session): + data = { + "name_en": "english", + "name_fr": "french", + "description_en": "english description", + "description_fr": "french description", + "sms_process_type": NORMAL, + "email_process_type": NORMAL, + "hidden": False, + } + + template_category = TemplateCategory(**data) + dao_create_template_category(template_category) + + assert dao_get_template_category_by_id(template_category.id) == template_category + + +@pytest.mark.parametrize( + "template_type, hidden, expected_count, categories_to_insert", + [ + ( + None, + None, + 2, + [ + { + "name_en": "english", + "name_fr": "french", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": False, + }, + { + "name_en": "english2", + "name_fr": "french2", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": False, + }, + ], + ), + # Filter by template type SMS + ( + "sms", + None, + 2, + [ + { + "name_en": "english", + "name_fr": "french", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": False, + }, + { + "name_en": "english2", + "name_fr": "french2", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": False, + }, + ], + ), + # Filter by template type email + ( + "email", + None, + 2, + [ + { + "name_en": "english", + "name_fr": "french", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": False, + }, + { + "name_en": "english2", + "name_fr": "french2", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": False, + }, + ], + ), + # Filter by hidden False + ( + None, + False, + 1, + [ + { + "name_en": "english", + "name_fr": "french", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": False, + }, + { + "name_en": "english2", + "name_fr": "french2", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": True, + }, + ], + ), + # Filter by hidden True + ( + None, + True, + 1, + [ + { + "name_en": "english", + "name_fr": "french", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": False, + }, + { + "name_en": "english2", + "name_fr": "french2", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": True, + }, + ], + ), + # Filter by template type SMS and hidden False + ( + "sms", + False, + 1, + [ + { + "name_en": "english", + "name_fr": "french", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": False, + }, + { + "name_en": "english2", + "name_fr": "french2", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": True, + }, + 
], + ), + ( + "sms", + False, + 0, + [ + { + "name_en": "english", + "name_fr": "french", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": True, + }, + { + "name_en": "english2", + "name_fr": "french2", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": True, + }, + ], + ), + # Filter by template type email and hidden True + ( + "email", + True, + 1, + [ + { + "name_en": "english", + "name_fr": "french", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": False, + }, + { + "name_en": "english2", + "name_fr": "french2", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": True, + }, + ], + ), + ( + "email", + True, + 0, + [ + { + "name_en": "english", + "name_fr": "french", + "sms_process_type": "normal", + "email_process_type": "normal", + "hidden": False, + }, + { + "name_en": "english2", + "name_fr": "french2", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": False, + }, + ], + ), + ], +) +def test_get_all_template_categories_with_filters( + template_type, hidden, expected_count, categories_to_insert, notify_db, notify_db_session +): + for category_data in categories_to_insert: + template_category = TemplateCategory(**category_data) + dao_create_template_category(template_category) + + create_sample_template(notify_db, notify_db_session, template_type="email", template_category=template_category) + create_sample_template(notify_db, notify_db_session, template_type="sms", template_category=template_category) + + retrieved_categories = dao_get_all_template_categories(template_type=template_type, hidden=hidden) + + assert len(retrieved_categories) == expected_count + + +def test_dao_delete_template_category_by_id_should_delete_category_when_no_associated_templates( + notify_db_session, sample_template_category +): + dao_delete_template_category_by_id(sample_template_category.id) + + assert TemplateCategory.query.count() == 0 + + +def test_dao_delete_template_category_by_id_should_not_allow_deletion_when_associated_with_template( + notify_db, notify_db_session, sample_template_category +): + create_sample_template(notify_db, notify_db_session, template_category=sample_template_category) + + with pytest.raises(InvalidRequest): + dao_delete_template_category_by_id(sample_template_category.id) + + assert TemplateCategory.query.count() == 1 + + +def test_dao_delete_template_category_by_id_should_allow_deletion_with_cascade_when_associated_with_template( + notify_db, notify_db_session, sample_template_category, populate_generic_categories +): + template = create_sample_template(notify_db, notify_db_session, template_category=sample_template_category) + + dao_delete_template_category_by_id(sample_template_category.id, cascade=True) + # 3 here because we have 3 generic defaut categories that will remain post-delete + assert TemplateCategory.query.count() == 3 + assert str(template.template_category_id) == current_app.config["DEFAULT_TEMPLATE_CATEGORY_MEDIUM"] diff --git a/tests/app/dao/test_templates_dao.py b/tests/app/dao/test_templates_dao.py index 732e94eff7..516ceb6220 100644 --- a/tests/app/dao/test_templates_dao.py +++ b/tests/app/dao/test_templates_dao.py @@ -16,22 +16,25 @@ dao_get_template_versions, dao_redact_template, dao_update_template, + dao_update_template_category, dao_update_template_reply_to, ) -from app.models import Template, TemplateFolder, TemplateHistory, TemplateRedacted +from app.models import Template, TemplateHistory, TemplateRedacted from 
app.schemas import template_schema from tests.app.db import create_letter_contact, create_template @pytest.mark.parametrize( - "template_type, subject", + "template_type, subject, redact_personalisation", [ - ("sms", None), - ("email", "subject"), - ("letter", "subject"), + ("sms", None, False), + ("email", "subject", False), + ("letter", "subject", False), + ("sms", None, True), + ("email", "subject", True), ], ) -def test_create_template(sample_service, sample_user, template_type, subject): +def test_create_template(sample_service, sample_user, template_type, subject, redact_personalisation): data = { "name": "Sample Template", "template_type": template_type, @@ -44,12 +47,15 @@ def test_create_template(sample_service, sample_user, template_type, subject): if subject: data.update({"subject": subject}) template = Template(**data) - dao_create_template(template) + dao_create_template(template, redact_personalisation=redact_personalisation) assert Template.query.count() == 1 assert len(dao_get_all_templates_for_service(sample_service.id)) == 1 assert dao_get_all_templates_for_service(sample_service.id)[0].name == "Sample Template" assert dao_get_all_templates_for_service(sample_service.id)[0].process_type == "normal" + assert ( + dao_get_all_templates_for_service(sample_service.id)[0].template_redacted.redact_personalisation == redact_personalisation + ) def test_create_template_creates_redact_entry(sample_service): @@ -99,35 +105,6 @@ def test_update_template(sample_service, sample_user): assert dao_get_all_templates_for_service(sample_service.id)[0].name == "new name" -def test_update_template_in_a_folder_to_archived(sample_service, sample_user): - template_data = { - "name": "Sample Template", - "template_type": "sms", - "content": "Template content", - "service": sample_service, - "created_by": sample_user, - } - template = Template(**template_data) - - template_folder_data = { - "name": "My Folder", - "service_id": sample_service.id, - } - template_folder = TemplateFolder(**template_folder_data) - - template.folder = template_folder - dao_create_template(template) - - template.archived = True - dao_update_template(template) - - template_folder = TemplateFolder.query.one() - archived_template = Template.query.one() - - assert template_folder - assert not archived_template.folder - - def test_dao_update_template_reply_to_none_to_some(sample_service, sample_user): letter_contact = create_letter_contact(sample_service, "Edinburgh, ED1 1AA") @@ -318,7 +295,7 @@ def test_get_all_templates_ignores_hidden_templates(sample_service): def test_get_template_id_from_redis_when_cached(sample_service, mocker): sample_template = create_template(template_name="Test Template", service=sample_service) - json_data = {"data": template_schema.dump(sample_template).data} + json_data = {"data": template_schema.dump(sample_template)} mocked_redis_get = mocker.patch.object( redis_store, "get", @@ -328,13 +305,12 @@ def test_get_template_id_from_redis_when_cached(sample_service, mocker): template = dao_get_template_by_id(sample_template.id, use_cache=True) assert mocked_redis_get.called - assert str(sample_template.id) == template[0].id - assert json.dumps(json_data["data"], default=lambda o: o.hex if isinstance(o, UUID) else None) == json.dumps(template[1]) + assert str(sample_template.id) == template.id def test_get_template_id_with_specific_version_from_redis(sample_service, mocker, notify_db_session): sample_template = create_template(template_name="Test Template", service=sample_service) - json_data = 
{"data": template_schema.dump(sample_template).data} + json_data = {"data": template_schema.dump(sample_template)} mocked_redis_get = mocker.patch.object( redis_store, "get", @@ -344,9 +320,8 @@ def test_get_template_id_with_specific_version_from_redis(sample_service, mocker template = dao_get_template_by_id(sample_template.id, version=1, use_cache=True) assert mocked_redis_get.called - assert str(sample_template.id) == template[0].id - assert isinstance(template[0], TemplateHistory) - assert json.dumps(json_data["data"], default=lambda o: o.hex if isinstance(o, UUID) else None) == json.dumps(template[1]) + assert str(sample_template.id) == template.id + assert isinstance(template, TemplateHistory) def test_get_template_by_id_and_service(sample_service): @@ -377,7 +352,7 @@ def test_get_template_version_returns_none_for_hidden_templates(sample_service): def test_get_template_by_id_and_service_returns_none_if_no_template(sample_service, fake_uuid): with pytest.raises(NoResultFound) as e: dao_get_template_by_id_and_service_id(template_id=fake_uuid, service_id=sample_service.id) - assert "No row was found for one" in str(e.value) + assert "No row was found when one was required" in str(e.value) def test_create_template_creates_a_history_record_with_current_data(sample_service, sample_user): @@ -473,8 +448,9 @@ def test_get_template_versions(sample_template): from app.schemas import template_history_schema - v = template_history_schema.load(versions, many=True) + v = template_history_schema.dump(versions, many=True) assert len(v) == 2 + assert {template_history["version"] for template_history in v} == {1, 2} def test_get_template_versions_is_empty_for_hidden_templates(sample_service): @@ -515,3 +491,16 @@ def test_template_postage_constraint_on_update(sample_service, sample_user): created.postage = "third" with pytest.raises(expected_exception=SQLAlchemyError): dao_update_template(created) + + +def test_dao_update_template_category(sample_template, sample_template_category): + dao_update_template_category(sample_template.id, sample_template_category.id) + + updated_template = Template.query.get(sample_template.id) + assert updated_template.template_category_id == sample_template_category.id + assert updated_template.updated_at is not None + assert updated_template.version == 2 + + history = TemplateHistory.query.filter_by(id=sample_template.id, version=updated_template.version).one() + assert not history.template_category_id + assert history.updated_at == updated_template.updated_at diff --git a/tests/app/dao/test_users_dao.py b/tests/app/dao/test_users_dao.py index a9ceb9967c..5e792ce8a6 100644 --- a/tests/app/dao/test_users_dao.py +++ b/tests/app/dao/test_users_dao.py @@ -176,9 +176,11 @@ def test_update_user_attribute_blocked(): def test_update_user_password(notify_api, notify_db, notify_db_session, sample_user): password = "newpassword" + sample_user.password_expired = True assert not sample_user.check_password(password) update_user_password(sample_user, password) assert sample_user.check_password(password) + assert not sample_user.password_expired def test_count_user_verify_codes(sample_user): diff --git a/tests/app/db.py b/tests/app/db.py index f60b5c5875..f06478ab54 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -109,6 +109,7 @@ def create_service( email_from=None, prefix_sms=True, message_limit=1000, + sms_daily_limit=1000, organisation_type="central", check_if_service_exists=False, go_live_user=None, @@ -122,6 +123,7 @@ def create_service( service = Service( name=service_name, 
message_limit=message_limit, + sms_daily_limit=sms_daily_limit, restricted=restricted, email_from=email_from if email_from else service_name.lower().replace(" ", "."), created_by=user if user else create_user(email="{}@digital.cabinet-office.gov.uk".format(uuid.uuid4())), @@ -186,6 +188,7 @@ def create_template( hidden=False, archived=False, folder=None, + template_category=None, postage=None, process_type="normal", ): @@ -198,6 +201,7 @@ def create_template( "reply_to": reply_to, "hidden": hidden, "folder": folder, + "template_category": template_category, "process_type": process_type, } if template_type == LETTER_TYPE: @@ -238,6 +242,7 @@ def create_notification( reply_to_text=None, created_by_id=None, postage=None, + queue_name=None, ): """ Creates in memory Notification Model @@ -295,6 +300,7 @@ def create_notification( "reply_to_text": reply_to_text, "created_by_id": created_by_id, "postage": postage, + "queue_name": queue_name, } return Notification(**data) @@ -396,6 +402,7 @@ def create_job( template, notification_count=1, created_at=None, + updated_at=None, job_status="pending", scheduled_for=None, processing_started=None, @@ -412,6 +419,7 @@ def create_job( "original_file_name": original_file_name, "notification_count": notification_count, "created_at": created_at or datetime.utcnow(), + "updated_at": updated_at, "created_by": template.created_by, "job_status": job_status, "scheduled_for": scheduled_for, @@ -496,13 +504,15 @@ def create_service_callback_api( return service_callback_api -def create_email_branding(colour="blue", logo="test_x2.png", name="test_org_1", text="DisplayName"): +def create_email_branding(colour="blue", logo="test_x2.png", name="test_org_1", text="DisplayName", organisation_id=None): data = { "colour": colour, "logo": logo, "name": name, "text": text, } + if organisation_id: + data["organisation_id"] = organisation_id email_branding = EmailBranding(**data) dao_create_email_branding(email_branding) @@ -545,7 +555,7 @@ def create_letter_rate( return rate -def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None): +def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None, last_used=None): id_ = uuid.uuid4() name = key_name if key_name else "{} api key {}".format(key_type, id_) @@ -557,6 +567,7 @@ def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None): key_type=key_type, id=id_, secret=uuid.uuid4(), + last_used_timestamp=last_used, ) db.session.add(api_key) db.session.commit() @@ -635,7 +646,6 @@ def create_annual_billing(service_id, free_sms_fragment_limit, financial_year_st def create_domain(domain, organisation_id): - domain = Domain(domain=domain, organisation_id=organisation_id) db.session.add(domain) @@ -733,7 +743,11 @@ def create_ft_notification_status( key_type="normal", notification_status="delivered", count=1, + billable_units=None, ): + if billable_units is None: + billable_units = count + if job: template = job.template if template: @@ -753,6 +767,7 @@ def create_ft_notification_status( key_type=key_type, notification_status=notification_status, notification_count=count, + billable_units=billable_units, ) db.session.add(data) db.session.commit() @@ -789,7 +804,6 @@ def create_service_data_retention(service, notification_type="sms", days_of_rete def create_invited_user(service=None, to_email_address=None): - if service is None: service = create_service() if to_email_address is None: diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py index bfa5f2d0f4..56b0eef3de 
100644 --- a/tests/app/delivery/test_send_to_providers.py +++ b/tests/app/delivery/test_send_to_providers.py @@ -1,23 +1,28 @@ import uuid from collections import namedtuple from datetime import datetime +from unittest import TestCase from unittest.mock import ANY, MagicMock, call import pytest from flask import current_app from notifications_utils.recipients import validate_and_format_phone_number +from pytest_mock import MockFixture from requests import HTTPError import app from app import aws_sns_client +from app.config import Config from app.dao import notifications_dao, provider_details_dao from app.dao.provider_details_dao import ( dao_switch_sms_provider_to_provider_with_identifier, ) from app.delivery import send_to_providers from app.exceptions import ( + DocumentDownloadException, InvalidUrlException, - MalwarePendingException, + MalwareDetectedException, + MalwareScanInProgressException, NotificationTechnicalFailureException, ) from app.models import ( @@ -28,11 +33,12 @@ KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, + BounceRateStatus, EmailBranding, Notification, Service, ) -from tests.app.conftest import document_download_response, sample_email_template +from tests.app.conftest import create_sample_email_template, document_download_response from tests.app.db import ( create_notification, create_reply_to_email, @@ -45,6 +51,112 @@ from tests.conftest import set_config_values +class TestProviderToUse: + def test_should_use_pinpoint_for_sms_by_default_if_configured(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234") + assert provider.name == "pinpoint" + + def test_should_use_sns_for_sms_by_default_if_partially_configured(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [], + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234", template_id=uuid.uuid4()) + assert provider.name == "sns" + + def test_should_use_pinpoint_for_sms_for_sc_template_if_sc_pool_configured(self, restore_provider_details, notify_api): + sc_template = uuid.uuid4() + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "", + "AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sc_template)], + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234", template_id=sc_template) + assert provider.name == "pinpoint" + + def test_should_use_sns_for_sms_if_dedicated_number(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234", False, "+12345678901") + assert provider.name == "sns" + + def test_should_use_sns_for_sms_if_sending_to_the_US(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+17065551234") + assert provider.name == "sns" + + @pytest.mark.serial + def 
test_should_use_pinpoint_for_sms_if_sending_outside_zone_1(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+447512501324", international=True) + assert provider.name == "pinpoint" + + def test_should_use_sns_for_sms_if_sending_to_non_CA_zone_1(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16715550123") + assert provider.name == "sns" + + def test_should_use_sns_for_sms_if_match_fails(self, restore_provider_details, notify_api): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "8695550123") # This number fails our matching code + assert provider.name == "sns" + + @pytest.mark.parametrize("sc_pool_id, default_pool_id", [("", "default_pool_id"), ("sc_pool_id", "")]) + def test_should_use_sns_if_pinpoint_not_configured(self, restore_provider_details, notify_api, sc_pool_id, default_pool_id): + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": sc_pool_id, + "AWS_PINPOINT_DEFAULT_POOL_ID": default_pool_id, + }, + ): + provider = send_to_providers.provider_to_use("sms", "1234", "+16135551234") + assert provider.name == "sns" + + @pytest.mark.skip(reason="Currently using only 1 SMS provider") def test_should_return_highest_priority_active_provider(restore_provider_details): providers = provider_details_dao.get_provider_details_by_notification_type("sms") @@ -78,19 +190,29 @@ def test_should_return_highest_priority_active_provider(restore_provider_details assert send_to_providers.provider_to_use("sms", "1234").name == first.identifier -def test_provider_to_use(restore_provider_details): - providers = provider_details_dao.get_provider_details_by_notification_type("sms") - first = providers[0] - - assert first.identifier == "sns" +def test_should_handle_opted_out_phone_numbers_if_using_pinpoint(notify_api, sample_template, mocker): + mocker.patch("app.aws_pinpoint_client.send_sms", return_value="opted_out") + db_notification = save_notification( + create_notification( + template=sample_template, + to_field="+16135551234", + status="created", + reply_to_text=sample_template.service.get_default_sms_sender(), + ) + ) - # provider is still SNS if SMS and sender is set - provider = send_to_providers.provider_to_use("sms", "1234", False, "+12345678901") - assert first.identifier == provider.name + with set_config_values( + notify_api, + { + "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id", + "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id", + }, + ): + send_to_providers.send_sms_to_provider(db_notification) - # provider is highest priority sms provider if sender is not set - provider = send_to_providers.provider_to_use("sms", "1234", False) - assert first.identifier == provider.name + notification = Notification.query.filter_by(id=db_notification.id).one() + assert notification.status == "permanent-failure" + assert notification.provider_response == "Phone number is opted out" def test_should_send_personalised_template_to_correct_sms_provider_and_persist(sample_sms_template_with_html, mocker): @@ -114,6 +236,9 @@ def 
test_should_send_personalised_template_to_correct_sms_provider_and_persist(s content="Sample service: Hello Jo\nHere is some HTML & entities", reference=str(db_notification.id), sender=current_app.config["FROM_NUMBER"], + template_id=sample_sms_template_with_html.id, + service_id=sample_sms_template_with_html.service_id, + sending_vehicle=None, ) notification = Notification.query.filter_by(id=db_notification.id).one() @@ -143,11 +268,12 @@ def test_should_send_personalised_template_to_correct_email_provider_and_persist mocker.patch("app.aws_ses_client.send_email", return_value="reference") statsd_mock = mocker.patch("app.delivery.send_to_providers.statsd_client") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") send_to_providers.send_email_to_provider(db_notification) app.aws_ses_client.send_email.assert_called_once_with( - '"Sample service" ', + '"=?utf-8?B?U2FtcGxlIHNlcnZpY2U=?=" ', "jo.smith@example.com", "Jo some HTML", body="Hello Jo\nThis is an email from GOV.\u200bUK with some HTML\n", @@ -172,6 +298,7 @@ def test_should_send_personalised_template_to_correct_email_provider_and_persist assert call(statsd_key) in statsd_mock.incr.call_args_list +@pytest.mark.skip(reason="the validator can throw a 500 causing us to fail all tests") def test_should_send_personalised_template_with_html_enabled(sample_email_template_with_advanced_html, mocker, notify_api): db_notification = save_notification( create_notification( @@ -232,11 +359,12 @@ def test_should_respect_custom_sending_domains(sample_service, mocker, sample_em sample_service.sending_domain = "foo.bar" mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") send_to_providers.send_email_to_provider(db_notification) app.aws_ses_client.send_email.assert_called_once_with( - '"Sample service" ', + '"=?utf-8?B?U2FtcGxlIHNlcnZpY2U=?=" ', "jo.smith@example.com", "Jo some HTML", body="Hello Jo\nThis is an email from GOV.\u200bUK with some HTML\n", @@ -282,6 +410,24 @@ def test_should_not_send_sms_message_when_message_is_empty_or_whitespace(sample_ assert Notification.query.get(notification.id).status == "technical-failure" +def test_should_not_send_sms_message_to_internal_test_number(sample_service, mocker): + template = create_template(sample_service) + notification = save_notification( + create_notification( + template=template, + to_field=Config.INTERNAL_TEST_NUMBER, + status="created", + reply_to_text=sample_service.get_default_sms_sender(), + ) + ) + mocker.patch("app.delivery.send_to_providers.send_sms_response", return_value="reference") + send_mock = mocker.patch("app.aws_sns_client.send_sms") + send_to_providers.send_sms_to_provider(notification) + + send_mock.assert_not_called() + assert Notification.query.get(notification.id).status == "sent" + + def test_send_sms_should_use_template_version_from_notification_not_latest(sample_template, mocker): db_notification = save_notification( create_notification( @@ -311,6 +457,9 @@ def test_send_sms_should_use_template_version_from_notification_not_latest(sampl content="Sample service: This is a template:\nwith a newline", reference=str(db_notification.id), sender=current_app.config["FROM_NUMBER"], + template_id=sample_template.id, + service_id=sample_template.service_id, + sending_vehicle=ANY, ) persisted_notification = notifications_dao.get_notification_by_id(db_notification.id) @@ -377,10 +526,10 @@ def test_should_not_send_to_provider_when_status_is_not_created(sample_template, def 
test_should_send_sms_with_downgraded_content(notify_db_session, mocker): # é, o, and u are in GSM. - # á, ï, grapes, tabs, zero width space and ellipsis are not - msg = "á é ï o u 🍇 foo\tbar\u200bbaz((misc))…" + # grapes, tabs, zero width space and ellipsis are not + msg = "é o u 🍇 foo\tbar\u200bbaz((misc))…" placeholder = "∆∆∆abc" - gsm_message = "?odz Housing Service: a é i o u ? foo barbaz???abc..." + gsm_message = "?odz Housing Service: é o u ? foo barbaz???abc..." service = create_service(service_name="Łódź Housing Service") template = create_template(service, content=msg) db_notification = save_notification(create_notification(template=template, personalisation={"misc": placeholder})) @@ -389,7 +538,9 @@ def test_should_send_sms_with_downgraded_content(notify_db_session, mocker): send_to_providers.send_sms_to_provider(db_notification) - aws_sns_client.send_sms.assert_called_once_with(to=ANY, content=gsm_message, reference=ANY, sender=ANY) + aws_sns_client.send_sms.assert_called_once_with( + to=ANY, content=gsm_message, reference=ANY, sender=ANY, template_id=ANY, service_id=ANY, sending_vehicle=ANY + ) def test_send_sms_should_use_service_sms_sender(sample_service, sample_template, mocker): @@ -402,7 +553,15 @@ def test_send_sms_should_use_service_sms_sender(sample_service, sample_template, db_notification, ) - app.aws_sns_client.send_sms.assert_called_once_with(to=ANY, content=ANY, reference=ANY, sender=sms_sender.sms_sender) + app.aws_sns_client.send_sms.assert_called_once_with( + to=ANY, + content=ANY, + reference=ANY, + sender=sms_sender.sms_sender, + template_id=ANY, + service_id=ANY, + sending_vehicle=ANY, + ) @pytest.mark.parametrize("research_mode,key_type", [(True, KEY_TYPE_NORMAL), (False, KEY_TYPE_TEST)]) @@ -450,6 +609,7 @@ def test_send_email_to_provider_should_not_send_to_provider_when_status_is_not_c def test_send_email_should_use_service_reply_to_email(sample_service, sample_email_template, mocker): mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") db_notification = save_notification(create_notification(template=sample_email_template, reply_to_text="foo@bar.com")) create_reply_to_email(service=sample_service, email_address="foo@bar.com") @@ -469,6 +629,54 @@ def test_send_email_should_use_service_reply_to_email(sample_service, sample_ema ) +def test_send_email_should_use_default_service_reply_to_email_when_two_are_set(sample_service, sample_email_template, mocker): + mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") + + create_reply_to_email(service=sample_service, email_address="foo@bar.com") + create_reply_to_email(service=sample_service, email_address="foo_two@bar.com", is_default=False) + + db_notification = save_notification(create_notification(template=sample_email_template, reply_to_text="foo@bar.com")) + + send_to_providers.send_email_to_provider( + db_notification, + ) + + app.aws_ses_client.send_email.assert_called_once_with( + ANY, + ANY, + ANY, + body=ANY, + html_body=ANY, + reply_to_address="foo@bar.com", + attachments=[], + ) + + +def test_send_email_should_use_non_default_service_reply_to_email_when_it_is_set(sample_service, sample_email_template, mocker): + mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") + + create_reply_to_email(service=sample_service, email_address="foo@bar.com") + 
create_reply_to_email(service=sample_service, email_address="foo_two@bar.com", is_default=False) + + db_notification = save_notification(create_notification(template=sample_email_template, reply_to_text="foo_two@bar.com")) + + send_to_providers.send_email_to_provider( + db_notification, + ) + + app.aws_ses_client.send_email.assert_called_once_with( + ANY, + ANY, + ANY, + body=ANY, + html_body=ANY, + reply_to_address="foo_two@bar.com", + attachments=[], + ) + + def test_get_html_email_renderer_should_return_for_normal_service(sample_service): options = send_to_providers.get_html_email_options(sample_service) assert options["fip_banner_english"] is True @@ -491,7 +699,6 @@ def test_get_html_email_renderer_should_return_for_normal_service(sample_service def test_get_html_email_renderer_with_branding_details( branding_type, fip_banner_english, fip_banner_french, notify_db, sample_service ): - email_branding = EmailBranding( brand_type=branding_type, colour="#000000", @@ -521,19 +728,20 @@ def test_get_html_email_renderer_with_branding_details_and_render_fip_banner_eng sample_service.email_branding = None notify_db.session.add_all([sample_service]) notify_db.session.commit() - options = send_to_providers.get_html_email_options(sample_service) assert options == { "fip_banner_english": True, "fip_banner_french": False, "logo_with_background_colour": False, + "alt_text_en": None, + "alt_text_fr": None, } def test_get_html_email_renderer_prepends_logo_path(notify_api): Service = namedtuple("Service", ["email_branding"]) - EmailBranding = namedtuple("EmailBranding", ["brand_type", "colour", "name", "logo", "text"]) + EmailBranding = namedtuple("EmailBranding", ["brand_type", "colour", "name", "logo", "text", "alt_text_en", "alt_text_fr"]) email_branding = EmailBranding( brand_type=BRANDING_ORG_NEW, @@ -541,6 +749,8 @@ def test_get_html_email_renderer_prepends_logo_path(notify_api): logo="justice-league.png", name="Justice League", text="League of Justice", + alt_text_en="alt_text_en", + alt_text_fr="alt_text_fr", ) service = Service( email_branding=email_branding, @@ -553,7 +763,7 @@ def test_get_html_email_renderer_prepends_logo_path(notify_api): def test_get_html_email_renderer_handles_email_branding_without_logo(notify_api): Service = namedtuple("Service", ["email_branding"]) - EmailBranding = namedtuple("EmailBranding", ["brand_type", "colour", "name", "logo", "text"]) + EmailBranding = namedtuple("EmailBranding", ["brand_type", "colour", "name", "logo", "text", "alt_text_en", "alt_text_fr"]) email_branding = EmailBranding( brand_type=BRANDING_ORG_BANNER_NEW, @@ -561,6 +771,8 @@ def test_get_html_email_renderer_handles_email_branding_without_logo(notify_api) logo=None, name="Justice League", text="League of Justice", + alt_text_en="alt_text_en", + alt_text_fr="alt_text_fr", ) service = Service( email_branding=email_branding, @@ -574,6 +786,8 @@ def test_get_html_email_renderer_handles_email_branding_without_logo(notify_api) assert renderer["brand_text"] == "League of Justice" assert renderer["brand_colour"] == "#000000" assert renderer["brand_name"] == "Justice League" + assert renderer["alt_text_en"] == "alt_text_en" + assert renderer["alt_text_fr"] == "alt_text_fr" def test_should_not_update_notification_if_research_mode_on_exception(sample_service, sample_notification, mocker): @@ -718,11 +932,15 @@ def test_should_handle_sms_sender_and_prefix_message( sender=expected_sender, to=ANY, reference=ANY, + template_id=ANY, + service_id=ANY, + sending_vehicle=ANY, ) def 
test_send_email_to_provider_uses_reply_to_from_notification(sample_email_template, mocker): mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") db_notification = save_notification(create_notification(template=sample_email_template, reply_to_text="test@test.com")) @@ -741,8 +959,26 @@ def test_send_email_to_provider_uses_reply_to_from_notification(sample_email_tem ) +def test_should_not_send_email_message_to_internal_test_address(sample_service, sample_email_template, mocker): + notification = save_notification( + create_notification( + template=sample_email_template, + to_field=Config.INTERNAL_TEST_EMAIL_ADDRESS, + status="created", + reply_to_text=sample_service.get_default_reply_to_email_address(), + ) + ) + mocker.patch("app.delivery.send_to_providers.send_email_response", return_value="reference") + send_mock = mocker.patch("app.aws_ses_client.send_email") + send_to_providers.send_email_to_provider(notification) + + send_mock.assert_not_called() + assert Notification.query.get(notification.id).status == "sending" + + def test_send_email_to_provider_should_format_reply_to_email_address(sample_email_template, mocker): mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") db_notification = save_notification(create_notification(template=sample_email_template, reply_to_text="test@test.com\t")) @@ -773,6 +1009,7 @@ def test_send_sms_to_provider_should_format_phone_number(sample_notification, mo def test_send_email_to_provider_should_format_email_address(sample_email_notification, mocker): sample_email_notification.to = "test@example.com\t" send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") send_to_providers.send_email_to_provider(sample_email_notification) @@ -789,98 +1026,97 @@ def test_send_email_to_provider_should_format_email_address(sample_email_notific ) -def test_notification_can_have_document_attachment_without_mlwr_sid(sample_email_template, mocker): - send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") - mlwr_mock = mocker.patch("app.delivery.send_to_providers.check_mlwr") - response = document_download_response() - del response["document"]["mlwr_sid"] - personalisation = {"file": response} +def test_file_attachment_retry(mocker, notify_db, notify_db_session): + template = create_sample_email_template(notify_db, notify_db_session, content="Here is your ((file))") - db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) - - send_to_providers.send_email_to_provider( - db_notification, - ) - - send_mock.assert_called() - mlwr_mock.assert_not_called() + class mock_response: + status_code = 200 + def json(): + return {"av-status": "clean"} -def test_notification_can_have_document_attachment_if_mlwr_sid_is_false(sample_email_template, mocker): - send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") - mlwr_mock = mocker.patch("app.delivery.send_to_providers.check_mlwr") - personalisation = {"file": document_download_response({"mlwr_sid": "false"})} + mocker.patch("app.delivery.send_to_providers.document_download_client.check_scan_verdict", return_value=mock_response) - db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) - - 
send_to_providers.send_email_to_provider( - db_notification, - ) - - send_mock.assert_called() - mlwr_mock.assert_not_called() + personalisation = { + "file": document_download_response( + { + "direct_file_url": "http://foo.bar/direct_file_url", + "url": "http://foo.bar/url", + "mime_type": "application/pdf", + } + ) + } + personalisation["file"]["document"]["sending_method"] = "attach" + personalisation["file"]["document"]["filename"] = "file.txt" + personalisation["file"]["document"]["id"] = "1234" + db_notification = save_notification(create_notification(template=template, personalisation=personalisation)) -def test_notification_raises_a_retry_exception_if_mlwr_state_is_missing(sample_email_template, mocker): + mocker.patch("app.delivery.send_to_providers.statsd_client") mocker.patch("app.aws_ses_client.send_email", return_value="reference") - mocker.patch("app.delivery.send_to_providers.check_mlwr", return_value={}) - personalisation = {"file": document_download_response()} - db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) + # When a urllib3 request attempts retries and fails it will wrap the offending exception in a MaxRetryError + # thus we'll capture the logged exception and assert it's a MaxRetryError to verify that retries were attempted + mock_logger = mocker.patch("app.delivery.send_to_providers.current_app.logger.error") + logger_args = [] - with pytest.raises(MalwarePendingException): - send_to_providers.send_email_to_provider( - db_notification, - ) + def mock_error(*args): + logger_args.append(args) + mock_logger.side_effect = mock_error -def test_notification_raises_a_retry_exception_if_mlwr_state_is_not_complete(sample_email_template, mocker): - mocker.patch("app.aws_ses_client.send_email", return_value="reference") - mocker.patch("app.delivery.send_to_providers.check_mlwr", return_value={"state": "foo"}) - personalisation = {"file": document_download_response()} + class MockHTTPResponse: + def __init__(self, status): + self.status = status + self.data = b"file content" if status == 200 else b"" - db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) + mock_http = mocker.patch("urllib3.PoolManager") + mock_http.return_value.request.side_effect = [ + MockHTTPResponse(500), + MockHTTPResponse(500), + MockHTTPResponse(500), + MockHTTPResponse(500), + MockHTTPResponse(500), + ] - with pytest.raises(MalwarePendingException): - send_to_providers.send_email_to_provider( - db_notification, - ) + send_to_providers.send_email_to_provider(db_notification) + exception = logger_args[0][0].split("Exception: ")[1] + assert mock_logger.call_count == 1 + assert "Max retries exceeded" in exception -def test_notification_raises_sets_notification_to_virus_found_if_mlwr_score_is_500(sample_email_template, mocker): - send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") - mocker.patch( - "app.delivery.send_to_providers.check_mlwr", - return_value={"state": "completed", "submission": {"max_score": 500}}, - ) - personalisation = {"file": document_download_response()} +def test_file_attachment_max_retries(mocker, notify_db, notify_db_session): + template = create_sample_email_template(notify_db, notify_db_session, content="Here is your ((file))") - db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) + class mock_response: + status_code = 200 - with 
pytest.raises(NotificationTechnicalFailureException) as e: - send_to_providers.send_email_to_provider(db_notification) - assert db_notification.id in e.value - send_mock.assert_not_called() - - assert Notification.query.get(db_notification.id).status == "virus-scan-failed" + def json(): + return {"av-status": "clean"} + mocker.patch("app.delivery.send_to_providers.document_download_client.check_scan_verdict", return_value=mock_response) -def test_notification_raises_sets_notification_to_virus_found_if_mlwr_score_above_500(sample_email_template, mocker): - send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") - mocker.patch( - "app.delivery.send_to_providers.check_mlwr", - return_value={"state": "completed", "submission": {"max_score": 501}}, - ) - personalisation = {"file": document_download_response()} + personalisation = { + "file": document_download_response( + { + "direct_file_url": "http://foo.bar/direct_file_url", + "url": "http://foo.bar/url", + "mime_type": "application/pdf", + } + ) + } + personalisation["file"]["document"]["sending_method"] = "attach" + personalisation["file"]["document"]["filename"] = "file.txt" - db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) + db_notification = save_notification(create_notification(template=template, personalisation=personalisation)) - with pytest.raises(NotificationTechnicalFailureException) as e: - send_to_providers.send_email_to_provider(db_notification) - assert db_notification.id in e.value - send_mock.assert_not_called() + mocker.patch("app.delivery.send_to_providers.statsd_client") + mocker.patch("app.aws_ses_client.send_email", return_value="reference") - assert Notification.query.get(db_notification.id).status == "virus-scan-failed" + mock_logger = mocker.patch("app.delivery.send_to_providers.current_app.logger.error") + send_to_providers.send_email_to_provider(db_notification) + assert mock_logger.call_count == 1 + assert "Max retries exceeded" in mock_logger.call_args[0][0] @pytest.mark.parametrize( @@ -899,14 +1135,23 @@ def test_notification_document_with_pdf_attachment( filename, expected_filename, ): - template = sample_email_template(notify_db, notify_db_session, content="Here is your ((file))") + template = create_sample_email_template(notify_db, notify_db_session, content="Here is your ((file))") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") + + class mock_response: + status_code = 200 + + def json(): + return {"av-status": "clean"} + + mocker.patch("app.delivery.send_to_providers.document_download_client.check_scan_verdict", return_value=mock_response) + personalisation = { "file": document_download_response( { "direct_file_url": "http://foo.bar/direct_file_url", "url": "http://foo.bar/url", "mime_type": "application/pdf", - "mlwr_sid": "false", } ) } @@ -920,27 +1165,26 @@ def test_notification_document_with_pdf_attachment( statsd_mock = mocker.patch("app.delivery.send_to_providers.statsd_client") send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") - request_mock = mocker.patch( - "app.delivery.send_to_providers.urllib.request.Request", - return_value="request_mock", + mocker.patch("app.delivery.send_to_providers.Retry") + + response_return_mock = MagicMock() + response_return_mock.status = 200 + response_return_mock.data = "Hello there!" 
+ + response_mock = mocker.patch( + "app.delivery.send_to_providers.PoolManager.request", + return_value=response_return_mock, ) - # See https://stackoverflow.com/a/34929900 - cm = MagicMock() - cm.read.return_value = "request_content" - cm.__enter__.return_value = cm - urlopen_mock = mocker.patch("app.delivery.send_to_providers.urllib.request.urlopen") - urlopen_mock.return_value = cm send_to_providers.send_email_to_provider(db_notification) attachments = [] if filename_attribute_present: - request_mock.assert_called_once_with("http://foo.bar/direct_file_url") - urlopen_mock.assert_called_once_with("request_mock") + response_mock.assert_called_with("GET", url="http://foo.bar/direct_file_url") attachments = [ { - "data": "request_content", "name": expected_filename, + "data": "Hello there!", "mime_type": "application/pdf", } ] @@ -974,14 +1218,13 @@ def test_notification_document_with_pdf_attachment( ], ) def test_notification_with_bad_file_attachment_url(mocker, notify_db, notify_db_session, sending_method): - template = sample_email_template(notify_db, notify_db_session, content="Here is your ((file))") + template = create_sample_email_template(notify_db, notify_db_session, content="Here is your ((file))") personalisation = { "file": document_download_response( { "direct_file_url": "file://foo.bar/file.txt" if sending_method == "attach" else "http://foo.bar/file.txt", "url": "file://foo.bar/file.txt" if sending_method == "link" else "http://foo.bar/file.txt", "mime_type": "application/pdf", - "mlwr_sid": "false", } ) } @@ -991,12 +1234,7 @@ def test_notification_with_bad_file_attachment_url(mocker, notify_db, notify_db_ db_notification = save_notification(create_notification(template=template, personalisation=personalisation)) - # See https://stackoverflow.com/a/34929900 - cm = MagicMock() - cm.read.return_value = "request_content" - cm.__enter__.return_value = cm - urlopen_mock = mocker.patch("app.delivery.send_to_providers.urllib.request.urlopen") - urlopen_mock.return_value = cm + mocker.patch("app.delivery.send_to_providers.Retry") with pytest.raises(InvalidUrlException): send_to_providers.send_email_to_provider(db_notification) @@ -1032,6 +1270,7 @@ def test_notification_raises_error_if_message_contains_sin_pii_that_passes_luhn( def test_notification_passes_if_message_contains_sin_pii_that_fails_luhn(sample_email_template_with_html, mocker, notify_api): send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") db_notification = save_notification( create_notification( @@ -1050,6 +1289,7 @@ def test_notification_passes_if_message_contains_sin_pii_that_fails_luhn(sample_ def test_notification_passes_if_message_contains_phone_number(sample_email_template_with_html, mocker): send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") db_notification = save_notification( create_notification( @@ -1075,3 +1315,192 @@ def test_is_service_allowed_html(sample_service: Service, notify_api): }, ): assert send_to_providers.is_service_allowed_html(sample_service) + + +class TestMalware: + def test_send_to_providers_fails_if_malware_detected(self, sample_email_template, mocker): + send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") + + class mock_response: + status_code = 423 + + def json(): + return {"av-status": "malicious"} + + 
mocker.patch("app.delivery.send_to_providers.document_download_client.check_scan_verdict", return_value=mock_response) + personalisation = {"file": document_download_response()} + + db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) + + with pytest.raises(MalwareDetectedException) as e: + send_to_providers.send_email_to_provider(db_notification) + assert db_notification.id in e.value + send_mock.assert_not_called() + + assert Notification.query.get(db_notification.id).status == "virus-scan-failed" + + def test_send_to_providers_fails_if_malware_scan_in_progress(self, sample_email_template, mocker): + send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") + + class mock_response: + status_code = 428 + + def json(): + return {"av-status": "in_progress"} + + mocker.patch("app.delivery.send_to_providers.document_download_client.check_scan_verdict", return_value=mock_response) + personalisation = {"file": document_download_response()} + + db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) + + with pytest.raises(MalwareScanInProgressException) as e: + send_to_providers.send_email_to_provider(db_notification) + assert db_notification.id in e.value + send_mock.assert_not_called() + + assert Notification.query.get(db_notification.id).status == "created" + + @pytest.mark.parametrize( + "status_code_returned, scan_verdict", + [ + (200, "clean"), + (408, "scan_timed_out"), + ], + ) + def test_send_to_providers_succeeds_if_malware_verdict_clean( + self, sample_email_template, mocker, status_code_returned, scan_verdict + ): + send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") + mocker.patch("app.delivery.send_to_providers.bounce_rate_client") + + class mock_response: + status_code = status_code_returned + + def json(): + return {"av-status": scan_verdict} + + mocker.patch("app.delivery.send_to_providers.document_download_client.check_scan_verdict", return_value=mock_response) + personalisation = {"file": document_download_response()} + + db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) + + send_to_providers.send_email_to_provider(db_notification) + send_mock.assert_called_once() + + assert Notification.query.get(db_notification.id).status == "sending" + + def test_send_to_providers_fails_if_document_download_internal_error(self, sample_email_template, mocker): + send_mock = mocker.patch("app.aws_ses_client.send_email", return_value="reference") + + class mock_response: + status_code = 404 + + def json(): + return {"av-status": "None"} + + mocker.patch("app.delivery.send_to_providers.document_download_client.check_scan_verdict", return_value=mock_response) + personalisation = {"file": document_download_response()} + + db_notification = save_notification(create_notification(template=sample_email_template, personalisation=personalisation)) + + with pytest.raises(DocumentDownloadException) as e: + send_to_providers.send_email_to_provider(db_notification) + assert db_notification.id in e.value + send_mock.assert_not_called() + + assert Notification.query.get(db_notification.id).status == "technical-failure" + + +class TestBounceRate: + def test_send_email_should_use_service_reply_to_email(self, sample_service, sample_email_template, mocker, notify_api): + mocker.patch("app.aws_ses_client.send_email", return_value="reference") + 
mocker.patch("app.bounce_rate_client.set_sliding_notifications") + db_notification = save_notification(create_notification(template=sample_email_template, reply_to_text="foo@bar.com")) + create_reply_to_email(service=sample_service, email_address="foo@bar.com") + + send_to_providers.send_email_to_provider( + db_notification, + ) + app.bounce_rate_client.set_sliding_notifications.assert_called_once_with(sample_service.id, str(db_notification.id)) + + def test_check_service_over_bounce_rate_critical(self, mocker: MockFixture, notify_api, fake_uuid): + with notify_api.app_context(): + mocker.patch("app.bounce_rate_client.check_bounce_rate_status", return_value=BounceRateStatus.CRITICAL.value) + mocker.patch("app.bounce_rate_client.get_bounce_rate", return_value=current_app.config["BR_CRITICAL_PERCENTAGE"]) + mock_logger = mocker.patch("app.delivery.send_to_providers.current_app.logger.warning") + send_to_providers.check_service_over_bounce_rate(fake_uuid) + mock_logger.assert_called_once_with( + f"Service: {fake_uuid} has met or exceeded a critical bounce rate threshold of 10%. Bounce rate: {current_app.config['BR_CRITICAL_PERCENTAGE']}" + ) + + def test_check_service_over_bounce_rate_warning(self, mocker: MockFixture, notify_api, fake_uuid): + with notify_api.app_context(): + mocker.patch("app.bounce_rate_client.check_bounce_rate_status", return_value=BounceRateStatus.WARNING.value) + mocker.patch("app.bounce_rate_client.get_bounce_rate", return_value=current_app.config["BR_WARNING_PERCENTAGE"]) + mock_logger = mocker.patch("app.notifications.validators.current_app.logger.warning") + send_to_providers.check_service_over_bounce_rate(fake_uuid) + mock_logger.assert_called_once_with( + f"Service: {fake_uuid} has met or exceeded a warning bounce rate threshold of 5%. 
Bounce rate: {current_app.config['BR_WARNING_PERCENTAGE']}" + ) + + def test_check_service_over_bounce_rate_normal(self, mocker: MockFixture, notify_api, fake_uuid): + with notify_api.app_context(): + mocker.patch("app.bounce_rate_client.check_bounce_rate_status", return_value=BounceRateStatus.NORMAL.value) + mocker.patch("app.bounce_rate_client.get_bounce_rate", return_value=0.0) + mock_logger = mocker.patch("app.notifications.validators.current_app.logger.warning") + assert send_to_providers.check_service_over_bounce_rate(fake_uuid) is None + mock_logger.assert_not_called() + + +@pytest.mark.parametrize( + "encoded_text, charset, encoding, expected", + [ + ("hello_world", "utf-8", "B", "=?utf-8?B?hello_world?="), + ("hello_world", "utf-8", "Q", "=?utf-8?Q?hello_world?="), + ("hello_world2", "utf-8", "B", "=?utf-8?B?hello_world2?="), + ], +) +def test_mime_encoded_word_syntax_encoding(encoded_text, charset, encoding, expected): + result = send_to_providers.mime_encoded_word_syntax(encoded_text=encoded_text, charset=charset, encoding=encoding) + assert result == expected + + +class TestGetFromAddress(TestCase): + def test_get_from_address_ascii(self): + # Arrange + friendly_from = "John Doe" + email_from = "johndoe" + sending_domain = "example.com" + + # Act + result = send_to_providers.get_from_address(friendly_from, email_from, sending_domain) + + # Assert + expected_result = '"=?utf-8?B?Sm9obiBEb2U=?=" ' + self.assertEqual(result, expected_result) + + def test_get_from_address_non_ascii(self): + # Arrange + friendly_from = "Jöhn Döe" + email_from = "johndoe" + sending_domain = "example.com" + + # Act + result = send_to_providers.get_from_address(friendly_from, email_from, sending_domain) + + # Assert + expected_result = '"=?utf-8?B?SsO2aG4gRMO2ZQ==?=" ' + self.assertEqual(result, expected_result) + + def test_get_from_address_empty_friendly_from(self): + # Arrange + friendly_from = "" + email_from = "johndoe" + sending_domain = "example.com" + + # Act + result = send_to_providers.get_from_address(friendly_from, email_from, sending_domain) + + # Assert + expected_result = '"=?utf-8?B??=" ' + self.assertEqual(result, expected_result) diff --git a/tests/app/email_branding/test_rest.py b/tests/app/email_branding/test_rest.py index c09218d62d..f2a360a17e 100644 --- a/tests/app/email_branding/test_rest.py +++ b/tests/app/email_branding/test_rest.py @@ -4,8 +4,8 @@ from tests.app.db import create_email_branding -def test_get_email_branding_options(admin_request, notify_db, notify_db_session): - email_branding1 = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Org1") +def test_get_email_branding_options(admin_request, notify_db, notify_db_session, sample_organisation): + email_branding1 = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Org1", organisation_id=sample_organisation.id) email_branding2 = EmailBranding(colour="#000000", logo="/path/other.png", name="Org2") notify_db.session.add_all([email_branding1, email_branding2]) notify_db.session.commit() @@ -17,10 +17,31 @@ def test_get_email_branding_options(admin_request, notify_db, notify_db_session) str(email_branding1.id), str(email_branding2.id), } + assert email_branding[0]["organisation_id"] == str(sample_organisation.id) + assert email_branding[1]["organisation_id"] == "" + + +def test_get_email_branding_options_filter_org(admin_request, notify_db, notify_db_session, sample_organisation): + email_branding1 = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Org1", 
organisation_id=sample_organisation.id) + email_branding2 = EmailBranding(colour="#000000", logo="/path/other.png", name="Org2") + notify_db.session.add_all([email_branding1, email_branding2]) + notify_db.session.commit() + email_branding = admin_request.get("email_branding.get_email_branding_options", organisation_id=sample_organisation.id)[ + "email_branding" + ] + + assert len(email_branding) == 1 + assert email_branding[0]["organisation_id"] == str(sample_organisation.id) + + email_branding2 = admin_request.get("email_branding.get_email_branding_options")["email_branding"] + + assert len(email_branding2) == 2 def test_get_email_branding_by_id(admin_request, notify_db, notify_db_session): - email_branding = EmailBranding(colour="#FFFFFF", logo="/path/image.png", name="Some Org", text="My Org") + email_branding = EmailBranding( + colour="#FFFFFF", logo="/path/image.png", name="Some Org", text="My Org", alt_text_en="hello world" + ) notify_db.session.add(email_branding) notify_db.session.commit() @@ -37,6 +58,9 @@ def test_get_email_branding_by_id(admin_request, notify_db, notify_db_session): "id", "text", "brand_type", + "organisation_id", + "alt_text_en", + "alt_text_fr", } assert response["email_branding"]["colour"] == "#FFFFFF" assert response["email_branding"]["logo"] == "/path/image.png" @@ -44,6 +68,8 @@ def test_get_email_branding_by_id(admin_request, notify_db, notify_db_session): assert response["email_branding"]["text"] == "My Org" assert response["email_branding"]["id"] == str(email_branding.id) assert response["email_branding"]["brand_type"] == str(email_branding.brand_type) + assert response["email_branding"]["alt_text_en"] == "hello world" + assert response["email_branding"]["alt_text_fr"] is None def test_post_create_email_branding(admin_request, notify_db_session): @@ -52,6 +78,8 @@ def test_post_create_email_branding(admin_request, notify_db_session): "colour": "#0000ff", "logo": "/images/test_x2.png", "brand_type": BRANDING_ORG_NEW, + "alt_text_en": "hello world", + "alt_text_fr": "bonjour le monde", } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) assert data["name"] == response["data"]["name"] @@ -59,6 +87,8 @@ def test_post_create_email_branding(admin_request, notify_db_session): assert data["logo"] == response["data"]["logo"] assert data["name"] == response["data"]["text"] assert data["brand_type"] == response["data"]["brand_type"] + assert data["alt_text_en"] == response["data"]["alt_text_en"] + assert data["alt_text_fr"] == response["data"]["alt_text_fr"] def test_post_create_email_branding_without_brand_type_defaults(admin_request, notify_db_session): @@ -66,16 +96,15 @@ def test_post_create_email_branding_without_brand_type_defaults(admin_request, n "name": "test email_branding", "colour": "#0000ff", "logo": "/images/test_x2.png", + "alt_text_en": "hello world", + "alt_text_fr": "bonjour le monde", } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) assert BRANDING_ORG_NEW == response["data"]["brand_type"] def test_post_create_email_branding_without_logo_is_ok(admin_request, notify_db_session): - data = { - "name": "test email_branding", - "colour": "#0000ff", - } + data = {"name": "test email_branding", "colour": "#0000ff", "alt_text_en": "hello", "alt_text_fr": "bonjour"} response = admin_request.post( "email_branding.create_email_branding", _data=data, @@ -85,13 +114,15 @@ def test_post_create_email_branding_without_logo_is_ok(admin_request, 
notify_db_ def test_post_create_email_branding_colour_is_valid(admin_request, notify_db_session): - data = {"logo": "images/text_x2.png", "name": "test branding"} + data = {"logo": "images/text_x2.png", "name": "test branding", "alt_text_en": "hello", "alt_text_fr": "bonjour"} response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) assert response["data"]["logo"] == data["logo"] assert response["data"]["name"] == "test branding" assert response["data"]["colour"] is None assert response["data"]["text"] == "test branding" + assert response["data"]["alt_text_en"] == "hello" + assert response["data"]["alt_text_fr"] == "bonjour" def test_post_create_email_branding_with_text(admin_request, notify_db_session): @@ -99,6 +130,8 @@ def test_post_create_email_branding_with_text(admin_request, notify_db_session): "text": "text for brand", "logo": "images/text_x2.png", "name": "test branding", + "alt_text_en": "hello", + "alt_text_fr": "bonjour", } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) @@ -106,6 +139,8 @@ def test_post_create_email_branding_with_text(admin_request, notify_db_session): assert response["data"]["name"] == "test branding" assert response["data"]["colour"] is None assert response["data"]["text"] == "text for brand" + assert response["data"]["alt_text_en"] == "hello" + assert response["data"]["alt_text_fr"] == "bonjour" def test_post_create_email_branding_with_text_and_name(admin_request, notify_db_session): @@ -113,6 +148,8 @@ def test_post_create_email_branding_with_text_and_name(admin_request, notify_db_ "name": "name for brand", "text": "text for brand", "logo": "images/text_x2.png", + "alt_text_en": "hello", + "alt_text_fr": "bonjour", } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) @@ -120,20 +157,35 @@ def test_post_create_email_branding_with_text_and_name(admin_request, notify_db_ assert response["data"]["name"] == "name for brand" assert response["data"]["colour"] is None assert response["data"]["text"] == "text for brand" + assert response["data"]["alt_text_en"] == "hello" + assert response["data"]["alt_text_fr"] == "bonjour" def test_post_create_email_branding_with_text_as_none_and_name(admin_request, notify_db_session): - data = {"name": "name for brand", "text": None, "logo": "images/text_x2.png"} + data = { + "name": "name for brand", + "text": None, + "logo": "images/text_x2.png", + "alt_text_en": "hello", + "alt_text_fr": "bonjour", + } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) assert response["data"]["logo"] == data["logo"] assert response["data"]["name"] == "name for brand" assert response["data"]["colour"] is None assert response["data"]["text"] is None + assert response["data"]["alt_text_en"] == "hello" + assert response["data"]["alt_text_fr"] == "bonjour" def test_post_create_email_branding_returns_400_when_name_is_missing(admin_request, notify_db_session): - data = {"text": "some text", "logo": "images/text_x2.png"} + data = { + "text": "some text", + "logo": "images/text_x2.png", + "alt_text_en": "hello", + "alt_text_fr": "bonjour", + } response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=400) assert response["errors"][0]["message"] == "name is a required property" @@ -150,7 +202,7 @@ def test_post_create_email_branding_returns_400_when_name_is_missing(admin_reque ], ) def 
test_post_update_email_branding_updates_field(admin_request, notify_db_session, data_update): - data = {"name": "test email_branding", "logo": "images/text_x2.png"} + data = {"name": "test email_branding", "logo": "images/text_x2.png", "alt_text_en": "hello", "alt_text_fr": "bonjour"} response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) email_branding_id = response["data"]["id"] @@ -179,7 +231,7 @@ def test_post_update_email_branding_updates_field(admin_request, notify_db_sessi ], ) def test_post_update_email_branding_updates_field_with_text(admin_request, notify_db_session, data_update): - data = {"name": "test email_branding", "logo": "images/text_x2.png"} + data = {"name": "test email_branding", "logo": "images/text_x2.png", "alt_text_en": "hello", "alt_text_fr": "bonjour"} response = admin_request.post("email_branding.create_email_branding", _data=data, _expected_status=201) email_branding_id = response["data"]["id"] diff --git a/tests/app/invite/test_invite_rest.py b/tests/app/invite/test_invite_rest.py index c380fa2f76..66843d1ee6 100644 --- a/tests/app/invite/test_invite_rest.py +++ b/tests/app/invite/test_invite_rest.py @@ -4,6 +4,7 @@ from app.models import EMAIL_AUTH_TYPE, Notification from tests import create_authorization_header +from tests.app.conftest import create_sample_invited_user @pytest.mark.parametrize( @@ -118,14 +119,11 @@ def test_create_invited_user_invalid_email(client, sample_service, mocker, fake_ def test_get_all_invited_users_by_service(client, notify_db, notify_db_session, sample_service): - - from tests.app.conftest import sample_invited_user - invites = [] for i in range(0, 5): email = "invited_user_{}@service.gov.uk".format(i) - invited_user = sample_invited_user(notify_db, notify_db_session, sample_service, email) + invited_user = create_sample_invited_user(notify_db, notify_db_session, sample_service, email) invites.append(invited_user) url = "/service/{}/invite".format(sample_service.id) diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index 97f537c956..338d3a5d84 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -8,12 +8,14 @@ import app.celery.tasks from app.dao.templates_dao import dao_update_template -from app.models import JOB_STATUS_PENDING, JOB_STATUS_TYPES +from app.models import JOB_STATUS_PENDING, JOB_STATUS_TYPES, ServiceSmsSender from tests import create_authorization_header from tests.app.db import ( create_ft_notification_status, create_job, create_notification, + create_service_with_inbound_number, + create_template, save_notification, ) from tests.conftest import set_config @@ -46,7 +48,7 @@ def test_get_job_with_unknown_id_returns404(client, sample_template, fake_uuid): response = client.get(path, headers=[auth_header]) assert response.status_code == 404 resp_json = json.loads(response.get_data(as_text=True)) - assert resp_json == {"message": "No result found", "result": "error"} + assert resp_json == {"message": "Job not found in database", "result": "error"} def test_cancel_job(client, sample_scheduled_job): @@ -72,6 +74,7 @@ def test_cant_cancel_normal_job(client, sample_job, mocker): assert mock_update.call_count == 0 +@pytest.mark.skip(reason="Letter tests") @freeze_time("2019-06-13 13:00") def test_cancel_letter_job_updates_notifications_and_job_to_cancelled(sample_letter_template, admin_request, mocker): job = create_job(template=sample_letter_template, notification_count=1, job_status="finished") @@ -94,6 +97,7 @@ def 
test_cancel_letter_job_updates_notifications_and_job_to_cancelled(sample_let assert response == 1 +@pytest.mark.skip(reason="Letter tests") @freeze_time("2019-06-13 13:00") def test_cancel_letter_job_does_not_call_cancel_if_can_letter_job_be_cancelled_returns_False( sample_letter_template, admin_request, mocker @@ -123,6 +127,71 @@ def test_cancel_letter_job_does_not_call_cancel_if_can_letter_job_be_cancelled_r assert response["message"] == "Sorry, it's too late, letters have already been sent." +def test_create_unscheduled_email_job_increments_daily_count(client, mocker, sample_email_job, fake_uuid): + mocker.patch("app.celery.tasks.process_job.apply_async") + mocker.patch("app.job.rest.increment_email_daily_count_send_warnings_if_needed") + mocker.patch( + "app.job.rest.get_job_metadata_from_s3", + return_value={ + "template_id": sample_email_job.template_id, + "original_file_name": sample_email_job.original_file_name, + "notification_count": "1", + "valid": "True", + }, + ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="email address\r\nsome@email.com", + ) + mocker.patch("app.dao.services_dao.dao_fetch_service_by_id", return_value=sample_email_job.service) + data = { + "id": fake_uuid, + "created_by": str(sample_email_job.created_by.id), + } + path = "/service/{}/job".format(sample_email_job.service_id) + auth_header = create_authorization_header() + headers = [("Content-Type", "application/json"), auth_header] + + response = client.post(path, data=json.dumps(data), headers=headers) + + assert response.status_code == 201 + + app.celery.tasks.process_job.apply_async.assert_called_once_with(([str(fake_uuid)]), queue="job-tasks") + app.job.rest.increment_email_daily_count_send_warnings_if_needed.assert_called_once_with(sample_email_job.service, 1) + + +def test_create_future_not_same_day_scheduled_email_job_does_not_increment_daily_count( + client, mocker, sample_email_job, fake_uuid +): + scheduled_date = (datetime.utcnow() + timedelta(hours=36, minutes=59)).isoformat() + mocker.patch("app.celery.tasks.process_job.apply_async") + mocker.patch("app.job.rest.increment_email_daily_count_send_warnings_if_needed") + mocker.patch( + "app.job.rest.get_job_metadata_from_s3", + return_value={ + "template_id": sample_email_job.template_id, + "original_file_name": sample_email_job.original_file_name, + "notification_count": "1", + "valid": "True", + }, + ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="email address\r\nsome@email.com", + ) + mocker.patch("app.dao.services_dao.dao_fetch_service_by_id", return_value=sample_email_job.service) + data = {"id": fake_uuid, "created_by": str(sample_email_job.created_by.id), "scheduled_for": scheduled_date} + path = "/service/{}/job".format(sample_email_job.service_id) + auth_header = create_authorization_header() + headers = [("Content-Type", "application/json"), auth_header] + + response = client.post(path, data=json.dumps(data), headers=headers) + + assert response.status_code == 201 + + app.job.rest.increment_email_daily_count_send_warnings_if_needed.assert_not_called() + + def test_create_unscheduled_job(client, sample_template, mocker, fake_uuid): mocker.patch("app.celery.tasks.process_job.apply_async") mocker.patch( @@ -134,6 +203,10 @@ def test_create_unscheduled_job(client, sample_template, mocker, fake_uuid): "valid": "True", }, ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) data = { "id": fake_uuid, "created_by": str(sample_template.created_by.id), @@ 
-171,6 +244,10 @@ def test_create_unscheduled_job_with_sender_id_in_metadata(client, sample_templa "sender_id": fake_uuid, }, ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) data = { "id": fake_uuid, "created_by": str(sample_template.created_by.id), @@ -188,6 +265,39 @@ def test_create_unscheduled_job_with_sender_id_in_metadata(client, sample_templa app.celery.tasks.process_job.apply_async.assert_called_once_with(([str(fake_uuid)]), queue="job-tasks") +def test_create_job_sets_sender_id_from_database(client, mocker, fake_uuid, sample_user): + service = create_service_with_inbound_number(inbound_number="12345") + template = create_template(service=service) + sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first() + + mocker.patch("app.celery.tasks.process_job.apply_async") + mocker.patch( + "app.job.rest.get_job_metadata_from_s3", + return_value={ + "template_id": str(template.id), + "original_file_name": "thisisatest.csv", + "notification_count": "1", + "valid": "True", + }, + ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) + data = { + "id": fake_uuid, + "created_by": str(template.created_by.id), + } + path = "/service/{}/job".format(service.id) + auth_header = create_authorization_header() + headers = [("Content-Type", "application/json"), auth_header] + + response = client.post(path, data=json.dumps(data), headers=headers) + resp_json = json.loads(response.get_data(as_text=True)) + + assert resp_json["data"]["sender_id"] == str(sms_sender.id) + + @freeze_time("2016-01-01 12:00:00.000000") def test_create_scheduled_job(client, sample_template, mocker, fake_uuid): scheduled_date = (datetime.utcnow() + timedelta(hours=95, minutes=59)).isoformat() @@ -201,6 +311,10 @@ def test_create_scheduled_job(client, sample_template, mocker, fake_uuid): "valid": "True", }, ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) data = { "id": fake_uuid, "created_by": str(sample_template.created_by.id), @@ -218,7 +332,9 @@ def test_create_scheduled_job(client, sample_template, mocker, fake_uuid): resp_json = json.loads(response.get_data(as_text=True)) assert resp_json["data"]["id"] == fake_uuid - assert resp_json["data"]["scheduled_for"] == datetime(2016, 1, 5, 11, 59, 0, tzinfo=pytz.UTC).isoformat() + assert resp_json["data"]["scheduled_for"] == datetime(2016, 1, 5, 11, 59, 0, tzinfo=pytz.UTC).isoformat( + timespec="microseconds" + ) assert resp_json["data"]["job_status"] == "scheduled" assert resp_json["data"]["template"] == str(sample_template.id) assert resp_json["data"]["original_file_name"] == "thisisatest.csv" @@ -242,26 +358,22 @@ def test_create_job_returns_403_if_service_is_not_active(client, fake_uuid, samp mock_job_dao.assert_not_called() -@pytest.mark.parametrize( - "extra_metadata", - ( - {}, - {"valid": "anything not the string True"}, - ), -) -def test_create_job_returns_400_if_file_is_invalid( - client, - fake_uuid, - sample_template, - mocker, - extra_metadata, -): +@pytest.mark.parametrize("extra_metadata, test_run", [({}, 1), ({"valid": "anything not the string True"}, 2)]) +def test_create_job_returns_400_if_file_is_invalid(client, fake_uuid, sample_template, mocker, extra_metadata, test_run): mock_job_dao = mocker.patch("app.dao.jobs_dao.dao_create_job") auth_header = create_authorization_header() metadata = dict( - template_id=str(sample_template.id), original_file_name="thisisatest.csv", notification_count=1, 
**extra_metadata + template_id=str(sample_template.id), + original_file_name=f"thisisatest{test_run}.csv", + notification_count=1, + **extra_metadata, ) mocker.patch("app.job.rest.get_job_metadata_from_s3", return_value=metadata) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) + data = {"id": fake_uuid} response = client.post( "/service/{}/job".format(sample_template.service.id), @@ -276,6 +388,7 @@ def test_create_job_returns_400_if_file_is_invalid( mock_job_dao.assert_not_called() +@pytest.mark.skip(reason="Letter tests") def test_create_job_returns_403_if_letter_template_type_and_service_in_trial( client, fake_uuid, sample_trial_letter_template, mocker ): @@ -319,6 +432,10 @@ def test_should_not_create_scheduled_job_too_far_in_the_future(client, sample_te "valid": "True", }, ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) data = { "id": fake_uuid, "created_by": str(sample_template.created_by.id), @@ -352,6 +469,10 @@ def test_should_not_create_scheduled_job_in_the_past(client, sample_template, mo "valid": "True", }, ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) data = { "id": fake_uuid, "created_by": str(sample_template.created_by.id), @@ -402,6 +523,10 @@ def test_create_job_returns_400_if_missing_data(client, sample_template, mocker, "template_id": str(sample_template.id), }, ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) data = { "id": fake_uuid, "valid": "True", @@ -426,6 +551,7 @@ def test_create_job_returns_404_if_template_does_not_exist(client, sample_servic "app.job.rest.get_job_metadata_from_s3", return_value={ "template_id": str(sample_service.id), + "valid": "True", }, ) data = { @@ -477,6 +603,10 @@ def test_create_job_returns_400_if_archived_template(client, sample_template, mo "template_id": str(sample_template.id), }, ) + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) data = { "id": fake_uuid, "valid": "True", @@ -666,8 +796,8 @@ def test_get_jobs_should_paginate(admin_request, sample_template): with set_config(admin_request.app, "PAGE_SIZE", 2): resp_json = admin_request.get("job.get_jobs_by_service", service_id=sample_template.service_id) - assert resp_json["data"][0]["created_at"] == "2015-01-01T10:00:00+00:00" - assert resp_json["data"][1]["created_at"] == "2015-01-01T09:00:00+00:00" + assert resp_json["data"][0]["created_at"] == "2015-01-01T10:00:00.000000+00:00" + assert resp_json["data"][1]["created_at"] == "2015-01-01T09:00:00.000000+00:00" assert resp_json["page_size"] == 2 assert resp_json["total"] == 10 assert "links" in resp_json @@ -680,8 +810,8 @@ def test_get_jobs_accepts_page_parameter(admin_request, sample_template): with set_config(admin_request.app, "PAGE_SIZE", 2): resp_json = admin_request.get("job.get_jobs_by_service", service_id=sample_template.service_id, page=2) - assert resp_json["data"][0]["created_at"] == "2015-01-01T08:00:00+00:00" - assert resp_json["data"][1]["created_at"] == "2015-01-01T07:00:00+00:00" + assert resp_json["data"][0]["created_at"] == "2015-01-01T08:00:00.000000+00:00" + assert resp_json["data"][1]["created_at"] == "2015-01-01T07:00:00.000000+00:00" assert resp_json["page_size"] == 2 assert resp_json["total"] == 10 assert "links" in resp_json diff --git a/tests/app/letters/test_letter_utils.py b/tests/app/letters/test_letter_utils.py deleted file mode 100644 index 
b8291ac97b..0000000000 --- a/tests/app/letters/test_letter_utils.py +++ /dev/null @@ -1,378 +0,0 @@ -from datetime import datetime - -import boto3 -import pytest -from flask import current_app -from freezegun import freeze_time -from moto import mock_s3 - -from app.letters.utils import ( - ScanErrorType, - copy_redaction_failed_pdf, - get_bucket_name_and_prefix_for_notification, - get_folder_name, - get_letter_pdf, - get_letter_pdf_filename, - letter_print_day, - move_failed_pdf, - upload_letter_pdf, -) -from app.models import ( - KEY_TYPE_NORMAL, - KEY_TYPE_TEST, - NOTIFICATION_VALIDATION_FAILED, - PRECOMPILED_TEMPLATE_NAME, -) -from tests.app.db import create_notification, save_notification - -FROZEN_DATE_TIME = "2018-03-14 17:00:00" - - -@pytest.fixture(name="sample_precompiled_letter_notification") -def _sample_precompiled_letter_notification(sample_letter_notification): - sample_letter_notification.template.hidden = True - sample_letter_notification.template.name = PRECOMPILED_TEMPLATE_NAME - sample_letter_notification.reference = "foo" - with freeze_time(FROZEN_DATE_TIME): - sample_letter_notification.created_at = datetime.utcnow() - sample_letter_notification.updated_at = datetime.utcnow() - return sample_letter_notification - - -@pytest.fixture(name="sample_precompiled_letter_notification_using_test_key") -def _sample_precompiled_letter_notification_using_test_key( - sample_precompiled_letter_notification, -): - sample_precompiled_letter_notification.key_type = KEY_TYPE_TEST - return sample_precompiled_letter_notification - - -@pytest.mark.parametrize( - "created_at,folder", - [ - (datetime(2017, 1, 1, 17, 29), "2017-01-01"), - (datetime(2017, 1, 1, 17, 31), "2017-01-02"), - ], -) -@pytest.mark.skip(reason="Letter feature") -def test_get_bucket_name_and_prefix_for_notification_valid_notification(sample_notification, created_at, folder): - sample_notification.created_at = created_at - sample_notification.updated_at = created_at - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config["LETTERS_PDF_BUCKET_NAME"] - assert bucket_prefix == "{folder}/NOTIFY.{reference}".format(folder=folder, reference=sample_notification.reference).upper() - - -def test_get_bucket_name_and_prefix_for_notification_get_from_sent_at_date( - sample_notification, -): - sample_notification.created_at = datetime(2019, 8, 1, 17, 35) - sample_notification.sent_at = datetime(2019, 8, 2, 17, 45) - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config["LETTERS_PDF_BUCKET_NAME"] - assert ( - bucket_prefix - == "{folder}/NOTIFY.{reference}".format(folder="2019-08-02", reference=sample_notification.reference).upper() - ) - - -def test_get_bucket_name_and_prefix_for_notification_from_created_at_date( - sample_notification, -): - sample_notification.created_at = datetime(2019, 8, 1, 12, 00) - sample_notification.updated_at = datetime(2019, 8, 2, 12, 00) - sample_notification.sent_at = datetime(2019, 8, 3, 12, 00) - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config["LETTERS_PDF_BUCKET_NAME"] - assert ( - bucket_prefix - == "{folder}/NOTIFY.{reference}".format(folder="2019-08-03", reference=sample_notification.reference).upper() - ) - - -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_notification_precompiled_letter_using_test_key( - 
sample_precompiled_letter_notification_using_test_key, -): - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification_using_test_key) - - assert bucket == current_app.config["TEST_LETTERS_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_precompiled_letter_notification_using_test_key.reference).upper() - - -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_notification_templated_letter_using_test_key( - sample_letter_notification, -): - sample_letter_notification.key_type = KEY_TYPE_TEST - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_letter_notification) - - assert bucket == current_app.config["TEST_LETTERS_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_letter_notification.reference).upper() - - -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_failed_validation( - sample_precompiled_letter_notification, -): - sample_precompiled_letter_notification.status = NOTIFICATION_VALIDATION_FAILED - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification) - - assert bucket == current_app.config["INVALID_PDF_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_precompiled_letter_notification.reference).upper() - - -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_test_noti_with_failed_validation( - sample_precompiled_letter_notification_using_test_key, -): - sample_precompiled_letter_notification_using_test_key.status = NOTIFICATION_VALIDATION_FAILED - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification_using_test_key) - - assert bucket == current_app.config["INVALID_PDF_BUCKET_NAME"] - assert bucket_prefix == "NOTIFY.{}".format(sample_precompiled_letter_notification_using_test_key.reference).upper() - - -def test_get_bucket_name_and_prefix_for_notification_invalid_notification(): - with pytest.raises(AttributeError): - get_bucket_name_and_prefix_for_notification(None) - - -@pytest.mark.parametrize( - "crown_flag,expected_crown_text", - [ - (True, "C"), - (False, "N"), - ], -) -@freeze_time("2017-12-04 17:29:00") -def test_get_letter_pdf_filename_returns_correct_filename(notify_api, mocker, crown_flag, expected_crown_text): - filename = get_letter_pdf_filename(reference="foo", crown=crown_flag) - - assert filename == "2017-12-04/NOTIFY.FOO.D.2.C.{}.20171204172900.PDF".format(expected_crown_text) - - -@pytest.mark.parametrize( - "postage,expected_postage", - [ - ("second", 2), - ("first", 1), - ], -) -@freeze_time("2017-12-04 17:29:00") -def test_get_letter_pdf_filename_returns_correct_postage_for_filename(notify_api, postage, expected_postage): - filename = get_letter_pdf_filename(reference="foo", crown=True, postage=postage) - - assert filename == "2017-12-04/NOTIFY.FOO.D.{}.C.C.20171204172900.PDF".format(expected_postage) - - -@freeze_time("2017-12-04 17:29:00") -def test_get_letter_pdf_filename_returns_correct_filename_for_test_letters(notify_api, mocker): - filename = get_letter_pdf_filename(reference="foo", crown="C", is_scan_letter=True) - - assert filename == "NOTIFY.FOO.D.2.C.C.20171204172900.PDF" - - -@freeze_time("2017-12-04 17:31:00") -@pytest.mark.skip(reason="Letter feature") -def test_get_letter_pdf_filename_returns_tomorrows_filename(notify_api, mocker): - filename = get_letter_pdf_filename(reference="foo", crown=True) - - assert filename == 
"2017-12-05/NOTIFY.FOO.D.2.C.C.20171204173100.PDF" - - -@mock_s3 -@pytest.mark.parametrize( - "bucket_config_name,filename_format", - [ - ("TEST_LETTERS_BUCKET_NAME", "NOTIFY.FOO.D.2.C.C.%Y%m%d%H%M%S.PDF"), - ("LETTERS_PDF_BUCKET_NAME", "%Y-%m-%d/NOTIFY.FOO.D.2.C.C.%Y%m%d%H%M%S.PDF"), - ], -) -@freeze_time(FROZEN_DATE_TIME) -def test_get_letter_pdf_gets_pdf_from_correct_bucket( - sample_precompiled_letter_notification_using_test_key, - bucket_config_name, - filename_format, -): - if bucket_config_name == "LETTERS_PDF_BUCKET_NAME": - sample_precompiled_letter_notification_using_test_key.key_type = KEY_TYPE_NORMAL - - bucket_name = current_app.config[bucket_config_name] - filename = datetime.utcnow().strftime(filename_format) - conn = boto3.resource("s3", region_name="eu-west-1") - conn.create_bucket(Bucket=bucket_name) - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - ret = get_letter_pdf(sample_precompiled_letter_notification_using_test_key) - - assert ret == b"pdf_content" - - -@pytest.mark.parametrize( - "is_precompiled_letter,bucket_config_name", - [(False, "LETTERS_PDF_BUCKET_NAME"), (True, "LETTERS_SCAN_BUCKET_NAME")], -) -def test_upload_letter_pdf_to_correct_bucket(sample_letter_notification, mocker, is_precompiled_letter, bucket_config_name): - if is_precompiled_letter: - sample_letter_notification.template.hidden = True - sample_letter_notification.template.name = PRECOMPILED_TEMPLATE_NAME - - mock_s3 = mocker.patch("app.letters.utils.s3upload") - - filename = get_letter_pdf_filename( - reference=sample_letter_notification.reference, - crown=sample_letter_notification.service.crown, - is_scan_letter=is_precompiled_letter, - ) - - upload_letter_pdf(sample_letter_notification, b"\x00\x01", precompiled=is_precompiled_letter) - - mock_s3.assert_called_once_with( - bucket_name=current_app.config[bucket_config_name], - file_location=filename, - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@pytest.mark.parametrize("postage,expected_postage", [("second", 2), ("first", 1)]) -def test_upload_letter_pdf_uses_postage_from_notification(sample_letter_template, mocker, postage, expected_postage): - letter_notification = save_notification(create_notification(template=sample_letter_template, postage=postage)) - mock_s3 = mocker.patch("app.letters.utils.s3upload") - - filename = get_letter_pdf_filename( - reference=letter_notification.reference, - crown=letter_notification.service.crown, - is_scan_letter=False, - postage=letter_notification.postage, - ) - - upload_letter_pdf(letter_notification, b"\x00\x01", precompiled=False) - - mock_s3.assert_called_once_with( - bucket_name=current_app.config["LETTERS_PDF_BUCKET_NAME"], - file_location=filename, - filedata=b"\x00\x01", - region=current_app.config["AWS_REGION"], - ) - - -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_move_failed_pdf_error(notify_api): - filename = "test.pdf" - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - conn = boto3.resource("s3", region_name="eu-west-1") - bucket = conn.create_bucket(Bucket=bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - move_failed_pdf(filename, ScanErrorType.ERROR) - - assert "ERROR/" + filename in [o.key for o in bucket.objects.all()] - assert filename not in [o.key for o in bucket.objects.all()] - - -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def 
test_move_failed_pdf_scan_failed(notify_api): - filename = "test.pdf" - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - conn = boto3.resource("s3", region_name="eu-west-1") - bucket = conn.create_bucket(Bucket=bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - move_failed_pdf(filename, ScanErrorType.FAILURE) - - assert "FAILURE/" + filename in [o.key for o in bucket.objects.all()] - assert filename not in [o.key for o in bucket.objects.all()] - - -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_copy_redaction_failed_pdf(notify_api): - filename = "test.pdf" - bucket_name = current_app.config["LETTERS_SCAN_BUCKET_NAME"] - - conn = boto3.resource("s3", region_name="eu-west-1") - bucket = conn.create_bucket(Bucket=bucket_name) - - s3 = boto3.client("s3", region_name="eu-west-1") - s3.put_object(Bucket=bucket_name, Key=filename, Body=b"pdf_content") - - copy_redaction_failed_pdf(filename) - - assert "REDACTION_FAILURE/" + filename in [o.key for o in bucket.objects.all()] - assert filename in [o.key for o in bucket.objects.all()] - - -@pytest.mark.parametrize( - "freeze_date, expected_folder_name", - [ - ("2018-04-01 17:50:00", "2018-04-02/"), - ("2018-07-02 16:29:00", "2018-07-02/"), - ("2018-07-02 16:30:00", "2018-07-02/"), - ("2018-07-02 16:31:00", "2018-07-03/"), - ("2018-01-02 16:31:00", "2018-01-02/"), - ("2018-01-02 17:31:00", "2018-01-03/"), - ("2018-07-02 22:30:00", "2018-07-03/"), - ("2018-07-02 23:30:00", "2018-07-03/"), - ("2018-07-03 00:30:00", "2018-07-03/"), - ("2018-01-02 22:30:00", "2018-01-03/"), - ("2018-01-02 23:30:00", "2018-01-03/"), - ("2018-01-03 00:30:00", "2018-01-03/"), - ], -) -@pytest.mark.skip(reason="Letter feature") -def test_get_folder_name_in_british_summer_time(notify_api, freeze_date, expected_folder_name): - with freeze_time(freeze_date): - now = datetime.utcnow() - folder_name = get_folder_name(_now=now, is_test_or_scan_letter=False) - assert folder_name == expected_folder_name - - -def test_get_folder_name_returns_empty_string_for_test_letter(): - assert "" == get_folder_name(datetime.utcnow(), is_test_or_scan_letter=True) - - -@freeze_time("2017-07-07 20:00:00") -@pytest.mark.skip(reason="Letter feature") -def test_letter_print_day_returns_today_if_letter_was_printed_after_1730_yesterday(): - created_at = datetime(2017, 7, 6, 17, 30) - assert letter_print_day(created_at) == "today" - - -@freeze_time("2017-07-07 16:30:00") -def test_letter_print_day_returns_today_if_letter_was_printed_today(): - created_at = datetime(2017, 7, 7, 12, 0) - assert letter_print_day(created_at) == "today" - - -@pytest.mark.parametrize( - "created_at, formatted_date", - [ - (datetime(2017, 7, 5, 16, 30), "on 6 July"), - (datetime(2017, 7, 6, 16, 29), "on 6 July"), - (datetime(2016, 8, 8, 10, 00), "on 8 August"), - (datetime(2016, 12, 12, 17, 29), "on 12 December"), - (datetime(2016, 12, 12, 17, 30), "on 13 December"), - ], -) -@freeze_time("2017-07-07 16:30:00") -@pytest.mark.skip(reason="Letter feature") -def test_letter_print_day_returns_formatted_date_if_letter_printed_before_1730_yesterday(created_at, formatted_date): - assert letter_print_day(created_at) == formatted_date diff --git a/tests/app/letters/test_returned_letters.py b/tests/app/letters/test_returned_letters.py deleted file mode 100644 index ca8ab75960..0000000000 --- a/tests/app/letters/test_returned_letters.py +++ /dev/null @@ -1,26 +0,0 @@ -import pytest - - -@pytest.mark.parametrize( - "status, 
references", - [ - (200, ["1234567890ABCDEF", "1234567890ABCDEG"]), - (400, ["1234567890ABCDEFG", "1234567890ABCDEG"]), - (400, ["1234567890ABCDE", "1234567890ABCDEG"]), - (400, ["1234567890ABCDE\u26d4", "1234567890ABCDEG"]), - (400, ["NOTIFY0001234567890ABCDEF", "1234567890ABCDEG"]), - ], -) -def test_process_returned_letters(status, references, admin_request, mocker): - mock_celery = mocker.patch("app.letters.rest.process_returned_letters_list.apply_async") - - response = admin_request.post( - "letter-job.create_process_returned_letters_job", - _data={"references": references}, - _expected_status=status, - ) - - if status != 200: - assert "{} does not match".format(references[0]) in response["errors"][0]["message"] - else: - mock_celery.assert_called_once_with([references], queue="database-tasks") diff --git a/tests/app/notifications/rest/test_callbacks.py b/tests/app/notifications/rest/test_callbacks.py index a13818c545..198f162050 100644 --- a/tests/app/notifications/rest/test_callbacks.py +++ b/tests/app/notifications/rest/test_callbacks.py @@ -16,6 +16,7 @@ def test_dvla_callback_returns_400_with_invalid_request(client): assert response.status_code == 400 +@pytest.mark.skip(reason="Deprecated: LETTER CODE") def test_dvla_callback_autoconfirms_subscription(client, mocker): autoconfirm_mock = mocker.patch("app.notifications.notifications_letter_callback.autoconfirm_subscription") @@ -25,6 +26,7 @@ def test_dvla_callback_autoconfirms_subscription(client, mocker): assert autoconfirm_mock.called +@pytest.mark.skip(reason="Deprecated: LETTER CODE") def test_dvla_callback_autoconfirm_does_not_call_update_letter_notifications_task(client, mocker): autoconfirm_mock = mocker.patch("app.notifications.notifications_letter_callback.autoconfirm_subscription") update_task = mocker.patch("app.notifications.notifications_letter_callback.update_letter_notifications_statuses.apply_async") @@ -37,6 +39,7 @@ def test_dvla_callback_autoconfirm_does_not_call_update_letter_notifications_tas assert not update_task.called +@pytest.mark.skip(reason="Deprecated: LETTER CODE") def test_dvla_callback_calls_does_not_update_letter_notifications_task_with_invalid_file_type(client, mocker): update_task = mocker.patch("app.notifications.notifications_letter_callback.update_letter_notifications_statuses.apply_async") @@ -47,6 +50,7 @@ def test_dvla_callback_calls_does_not_update_letter_notifications_task_with_inva assert not update_task.called +@pytest.mark.skip(reason="Deprecated: LETTER CODE") @pytest.mark.parametrize("filename", ["Notify-20170411153023-rs.txt", "Notify-20170411153023-rsp.txt"]) def test_dvla_rs_and_rsp_txt_file_callback_calls_update_letter_notifications_task(client, mocker, filename): update_task = mocker.patch("app.notifications.notifications_letter_callback.update_letter_notifications_statuses.apply_async") @@ -62,6 +66,7 @@ def test_dvla_rs_and_rsp_txt_file_callback_calls_update_letter_notifications_tas daily_sorted_counts_task.assert_called_with([filename], queue="notify-internal-tasks") +@pytest.mark.skip(reason="Deprecated: LETTER CODE") def test_dvla_ack_calls_does_not_call_letter_notifications_task(client, mocker): update_task = mocker.patch("app.notifications.notifications_letter_callback.update_letter_notifications_statuses.apply_async") daily_sorted_counts_task = mocker.patch( diff --git a/tests/app/notifications/rest/test_send_notification.py b/tests/app/notifications/rest/test_send_notification.py index aafd2b102c..a1cf1f0d52 100644 --- 
a/tests/app/notifications/rest/test_send_notification.py +++ b/tests/app/notifications/rest/test_send_notification.py @@ -9,6 +9,7 @@ from notifications_utils import SMS_CHAR_COUNT_LIMIT import app +from app.config import QueueNames from app.dao import notifications_dao from app.dao.api_key_dao import save_model_api_key from app.dao.services_dao import dao_update_service @@ -27,18 +28,21 @@ Template, ) from app.utils import get_document_url -from app.v2.errors import RateLimitError, TooManyRequestsError +from app.v2.errors import ( + RateLimitError, + TooManyRequestsError, + TrialServiceTooManyEmailRequestsError, +) from tests import create_authorization_header -from tests.app.conftest import sample_api_key as create_sample_api_key -from tests.app.conftest import sample_email_template as create_sample_email_template -from tests.app.conftest import sample_notification as create_sample_notification -from tests.app.conftest import sample_service -from tests.app.conftest import sample_service as create_sample_service -from tests.app.conftest import sample_service_safelist as create_sample_service_safelist -from tests.app.conftest import sample_template as create_sample_template from tests.app.conftest import ( - sample_template_without_email_permission, - sample_template_without_sms_permission, + create_sample_api_key, + create_sample_email_template, + create_sample_notification, + create_sample_service, + create_sample_service_safelist, + create_sample_template, + create_sample_template_without_email_permission, + create_sample_template_without_sms_permission, ) from tests.app.db import create_reply_to_email, create_service @@ -132,7 +136,7 @@ def test_send_notification_with_placeholders_replaced(notify_api, sample_email_t notification_id = response_data["notification"]["id"] data.update({"template_version": sample_email_template_with_placeholders.version}) - mocked.assert_called_once_with([notification_id], queue="send-email-tasks") + mocked.assert_called_once_with([notification_id], queue=QueueNames.SEND_EMAIL_MEDIUM) assert response.status_code == 201 assert response_data["body"] == "Hello Jo\nThis is an email from GOV.UK" assert response_data["subject"] == "Jo" @@ -322,7 +326,7 @@ def test_should_allow_valid_sms_notification(notify_api, sample_template, mocker response_data = json.loads(response.data)["data"] notification_id = response_data["notification"]["id"] - mocked.assert_called_once_with([notification_id], queue="send-sms-tasks") + mocked.assert_called_once_with([notification_id], queue=QueueNames.SEND_SMS_MEDIUM) assert response.status_code == 201 assert notification_id assert "subject" not in response_data @@ -370,7 +374,7 @@ def test_should_allow_valid_email_notification(notify_api, sample_email_template response_data = json.loads(response.get_data(as_text=True))["data"] notification_id = response_data["notification"]["id"] app.celery.provider_tasks.deliver_email.apply_async.assert_called_once_with( - [notification_id], queue="send-email-tasks" + [notification_id], queue=QueueNames.SEND_EMAIL_MEDIUM ) assert response.status_code == 201 @@ -385,7 +389,7 @@ def test_should_block_api_call_if_over_day_limit_for_live_service(notify_db, not with notify_api.test_request_context(): with notify_api.test_client() as client: mocker.patch( - "app.notifications.validators.check_service_over_daily_message_limit", + "app.notifications.validators.check_service_over_api_rate_limit_and_update_rate", side_effect=TooManyRequestsError(1), ) 
mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") @@ -417,13 +421,14 @@ def test_should_block_api_call_if_over_day_limit_for_live_service(notify_db, not def test_should_block_api_call_if_over_day_limit_for_restricted_service(notify_db, notify_db_session, notify_api, mocker): with notify_api.test_request_context(): with notify_api.test_client() as client: - mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") mocker.patch( - "app.notifications.validators.check_service_over_daily_message_limit", - side_effect=TooManyRequestsError(1), + "app.notifications.validators.check_email_daily_limit", + side_effect=TrialServiceTooManyEmailRequestsError(1), ) service = create_sample_service(notify_db, notify_db_session, limit=1, restricted=True) + create_sample_service_safelist(notify_db, notify_db_session, service=service, email_address="ok@ok.com") email_template = create_sample_email_template(notify_db, notify_db_session, service=service) create_sample_notification( notify_db, @@ -461,7 +466,7 @@ def test_should_allow_api_call_if_under_day_limit_regardless_of_type( sms_template = create_sample_template(notify_db, notify_db_session, service=service) create_sample_notification(notify_db, notify_db_session, template=email_template, service=service) - data = {"to": sample_user.mobile_number, "template": str(sms_template.id)} + data = {"to": sample_user.mobile_number, "template": str(sms_template.id), "valid": "True"} auth_header = create_authorization_header(service_id=service.id) @@ -493,7 +498,7 @@ def test_should_not_return_html_in_body(notify_api, notify_db, notify_db_session assert json.loads(response.get_data(as_text=True))["data"]["body"] == "hello\nthere" -def test_should_not_send_email_if_team_api_key_and_not_a_service_user(notify_api, sample_email_template, mocker): +def test_should_not_send_email_if_team_api_key_and_not_a_service_user(notify_api, sample_email_template, sample_service, mocker): with notify_api.test_request_context(), notify_api.test_client() as client: mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") data = { @@ -515,12 +520,12 @@ def test_should_not_send_email_if_team_api_key_and_not_a_service_user(notify_api assert response.status_code == 400 assert [ - "Can’t send to this recipient using a team-only API key " + f"Can’t send to this recipient using a team-only API key (service {sample_service.id}) " f'- see {get_document_url("en", "keys.html#team-and-safelist")}' ] == json_resp["message"]["to"] -def test_should_not_send_sms_if_team_api_key_and_not_a_service_user(notify_api, sample_template, mocker): +def test_should_not_send_sms_if_team_api_key_and_not_a_service_user(notify_api, sample_template, sample_service, mocker): with notify_api.test_request_context(), notify_api.test_client() as client: mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") @@ -542,7 +547,7 @@ def test_should_not_send_sms_if_team_api_key_and_not_a_service_user(notify_api, assert response.status_code == 400 assert [ - "Can’t send to this recipient using a team-only API key " + f"Can’t send to this recipient using a team-only API key (service {sample_service.id}) " f'- see {get_document_url("en", "keys.html#team-and-safelist")}' ] == json_resp["message"]["to"] @@ -563,7 +568,7 @@ def test_should_send_email_if_team_api_key_and_a_service_user(client, sample_ema headers=[("Content-Type", "application/json"), auth_header], ) - 
app.celery.provider_tasks.deliver_email.apply_async.assert_called_once_with([fake_uuid], queue="send-email-tasks") + app.celery.provider_tasks.deliver_email.apply_async.assert_called_once_with([fake_uuid], queue=QueueNames.SEND_EMAIL_MEDIUM) assert response.status_code == 201 @@ -654,13 +659,13 @@ def test_should_send_sms_if_team_api_key_and_a_service_user(client, sample_templ ], ) - app.celery.provider_tasks.deliver_sms.apply_async.assert_called_once_with([fake_uuid], queue="send-sms-tasks") + app.celery.provider_tasks.deliver_sms.apply_async.assert_called_once_with([fake_uuid], queue=QueueNames.SEND_SMS_MEDIUM) assert response.status_code == 201 @pytest.mark.parametrize( "template_type,queue_name", - [(SMS_TYPE, "send-sms-tasks"), (EMAIL_TYPE, "send-email-tasks")], + [(SMS_TYPE, QueueNames.SEND_SMS_MEDIUM), (EMAIL_TYPE, QueueNames.SEND_EMAIL_MEDIUM)], ) def test_should_persist_notification( client, @@ -710,7 +715,7 @@ def test_should_persist_notification( @pytest.mark.parametrize( "template_type,queue_name", - [(SMS_TYPE, "send-sms-tasks"), (EMAIL_TYPE, "send-email-tasks")], + [(SMS_TYPE, QueueNames.SEND_SMS_MEDIUM), (EMAIL_TYPE, QueueNames.SEND_EMAIL_MEDIUM)], ) def test_should_delete_notification_and_return_error_if_sqs_fails( client, @@ -850,7 +855,7 @@ def test_should_not_send_notification_to_non_safelist_recipient_in_trial_mode( ("Can’t send to this recipient when service is in trial mode " f'– see {get_document_url("en", "keys.html#live")}') if key_type == KEY_TYPE_NORMAL else ( - "Can’t send to this recipient using a team-only API key " + f"Can’t send to this recipient using a team-only API key (service {service.id}) " f'- see {get_document_url("en", "keys.html#team-and-safelist")}' ) ) @@ -1025,7 +1030,11 @@ def test_send_notification_uses_appropriate_queue_when_template_has_process_type notification_id = response_data["notification"]["id"] assert response.status_code == 201 - mocked.assert_called_once_with([notification_id], queue=f"{process_type}-tasks") + if notification_type == SMS_TYPE: + expected_queue = QueueNames.SEND_SMS_HIGH if process_type == "priority" else QueueNames.SEND_SMS_LOW + else: + expected_queue = QueueNames.SEND_EMAIL_HIGH if process_type == "priority" else QueueNames.SEND_EMAIL_LOW + mocked.assert_called_once_with([notification_id], queue=expected_queue) @pytest.mark.parametrize("notification_type, send_to", [("sms", "6502532222"), ("email", "sample@email.com")]) @@ -1077,7 +1086,7 @@ def test_should_allow_store_original_number_on_sms_notification(client, sample_t response_data = json.loads(response.data)["data"] notification_id = response_data["notification"]["id"] - mocked.assert_called_once_with([notification_id], queue="send-sms-tasks") + mocked.assert_called_once_with([notification_id], queue=QueueNames.SEND_SMS_MEDIUM) assert response.status_code == 201 assert notification_id notifications = Notification.query.all() @@ -1108,7 +1117,7 @@ def test_should_not_allow_international_number_on_sms_notification(client, sampl def test_should_allow_international_number_on_sms_notification(client, notify_db, notify_db_session, mocker): mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - service = sample_service(notify_db, notify_db_session, permissions=[INTERNATIONAL_SMS_TYPE, SMS_TYPE]) + service = create_sample_service(notify_db, notify_db_session, permissions=[INTERNATIONAL_SMS_TYPE, SMS_TYPE]) template = create_sample_template(notify_db, notify_db_session, service=service) data = {"to": "+20-12-1234-1234", "template": str(template.id)} @@ 
-1128,12 +1137,12 @@ def test_should_allow_international_number_on_sms_notification(client, notify_db "template_factory, to, expected_error", [ ( - sample_template_without_sms_permission, + create_sample_template_without_sms_permission, "+16502532222", "Cannot send text messages", ), ( - sample_template_without_email_permission, + create_sample_template_without_email_permission, "notify@digital.cabinet-office.gov.uk", "Cannot send emails", ), diff --git a/tests/app/notifications/test_callbacks.py b/tests/app/notifications/test_callbacks.py index 171fc42789..2724fd07af 100644 --- a/tests/app/notifications/test_callbacks.py +++ b/tests/app/notifications/test_callbacks.py @@ -1,11 +1,11 @@ from datetime import datetime -from app import DATETIME_FORMAT, encryption +from app import DATETIME_FORMAT, signer_complaint, signer_delivery_status from app.notifications.callbacks import ( create_complaint_callback_data, create_delivery_status_callback_data, ) -from tests.app.conftest import sample_notification as create_sample_notification +from tests.app.conftest import create_sample_notification from tests.app.db import create_complaint, create_service_callback_api @@ -23,13 +23,14 @@ def test_create_delivery_status_callback_data( ) callback_api = create_service_callback_api(service=sample_email_template.service, url="https://original_url.com") - assert encryption.decrypt(create_delivery_status_callback_data(notification, callback_api)) == { + assert signer_delivery_status.verify(create_delivery_status_callback_data(notification, callback_api)) == { "notification_client_reference": notification.client_reference, "notification_created_at": notification.created_at.strftime(DATETIME_FORMAT), "notification_id": str(notification.id), "notification_provider_response": notification.provider_response, "notification_sent_at": notification.sent_at.strftime(DATETIME_FORMAT), "notification_status": notification.status, + "notification_status_description": notification.formatted_status, "notification_to": notification.to, "notification_type": notification.notification_type, "notification_updated_at": notification.updated_at, @@ -53,7 +54,9 @@ def test_create_complaint_callback_data( complaint = create_complaint(notification=notification, service=notification.service) callback_api = create_service_callback_api(service=sample_email_template.service, url="https://original_url.com") - assert encryption.decrypt(create_complaint_callback_data(complaint, notification, callback_api, "recipient@example.com")) == { + assert signer_complaint.verify( + create_complaint_callback_data(complaint, notification, callback_api, "recipient@example.com") + ) == { "complaint_id": str(complaint.id), "notification_id": str(notification.id), "reference": notification.client_reference, diff --git a/tests/app/notifications/test_notifications_ses_callback.py b/tests/app/notifications/test_notifications_ses_callback.py index 2ae5fcabf3..196eaec1b6 100644 --- a/tests/app/notifications/test_notifications_ses_callback.py +++ b/tests/app/notifications/test_notifications_ses_callback.py @@ -1,4 +1,5 @@ from datetime import datetime +from unittest import mock import pytest from flask import json @@ -8,14 +9,30 @@ ses_complaint_callback, ses_complaint_callback_malformed_message_id, ses_complaint_callback_with_missing_complaint_type, + ses_complaint_callback_with_subtype, + ses_hard_bounce_callback, + ses_soft_bounce_callback, ) from app.dao.notifications_dao import get_notification_by_id -from app.models import Complaint +from app.models import ( + 
NOTIFICATION_HARD_BOUNCE, + NOTIFICATION_HARD_GENERAL, + NOTIFICATION_HARD_NOEMAIL, + NOTIFICATION_HARD_ONACCOUNTSUPPRESSIONLIST, + NOTIFICATION_HARD_SUPPRESSED, + NOTIFICATION_SOFT_ATTACHMENTREJECTED, + NOTIFICATION_SOFT_BOUNCE, + NOTIFICATION_SOFT_CONTENTREJECTED, + NOTIFICATION_SOFT_GENERAL, + NOTIFICATION_SOFT_MAILBOXFULL, + NOTIFICATION_SOFT_MESSAGETOOLARGE, + Complaint, +) from app.notifications.notifications_ses_callback import ( get_aws_responses, handle_complaint, ) -from tests.app.conftest import sample_notification as create_sample_notification +from tests.app.conftest import create_sample_notification from tests.app.db import ( create_notification, create_notification_history, @@ -34,6 +51,7 @@ "success": True, "notification_status": "delivered", "provider_response": None, + "bounce_response": mock.ANY, }, ), ( @@ -44,56 +62,87 @@ "success": True, "notification_status": "delivered", "provider_response": None, + "bounce_response": mock.ANY, }, ), ( "Bounce", - {"bounceType": "Permanent", "bounceSubType": "NoEmail"}, + { + "bounceType": "Permanent", + "bounceSubType": "NoEmail", + "feedbackId": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "timestamp": "2017-11-17T12:14:05.131Z", + }, { "message": "Hard bounced", "success": False, "notification_status": "permanent-failure", "provider_response": None, + "bounce_response": mock.ANY, }, ), ( "Bounce", - {"bounceType": "Permanent", "bounceSubType": "Suppressed"}, + { + "bounceType": "Permanent", + "bounceSubType": "Suppressed", + "feedbackId": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "timestamp": "2017-11-17T12:14:05.131Z", + }, { "message": "Hard bounced", "success": False, "notification_status": "permanent-failure", "provider_response": "The email address is on our email provider suppression list", + "bounce_response": mock.ANY, }, ), ( "Bounce", - {"bounceType": "Permanent", "bounceSubType": "OnAccountSuppressionList"}, + { + "bounceType": "Permanent", + "bounceSubType": "OnAccountSuppressionList", + "feedbackId": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "timestamp": "2017-11-17T12:14:05.131Z", + }, { "message": "Hard bounced", "success": False, "notification_status": "permanent-failure", "provider_response": "The email address is on the GC Notify suppression list", + "bounce_response": mock.ANY, }, ), ( "Bounce", - {"bounceType": "Transient", "bounceSubType": "AttachmentRejected"}, + { + "bounceType": "Transient", + "bounceSubType": "AttachmentRejected", + "feedbackId": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "timestamp": "2017-11-17T12:14:05.131Z", + }, { "message": "Soft bounced", "success": False, "notification_status": "temporary-failure", "provider_response": "The email was rejected because of its attachments", + "bounce_response": mock.ANY, }, ), ( "Bounce", - {"bounceType": "Transient", "bounceSubType": "MailboxFull"}, + { + "bounceType": "Transient", + "bounceSubType": "MailboxFull", + "feedbackId": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "timestamp": "2017-11-17T12:14:05.131Z", + }, { "message": "Soft bounced", "success": False, "notification_status": "temporary-failure", "provider_response": None, + "bounce_response": mock.ANY, }, ), ], @@ -182,3 +231,146 @@ def test_process_ses_results_in_complaint_save_complaint_with_null_complaint_typ assert len(complaints) == 1 assert complaints[0].notification_id == notification.id assert not complaints[0].complaint_type + + +def 
test_account_suppression_list_complaint_updates_notification_status(sample_email_template): + notification = save_notification(create_notification(template=sample_email_template, reference="ref1")) + assert get_notification_by_id(notification.id).status == "created" + + handle_complaint(json.loads(ses_complaint_callback_with_subtype("OnAccountSuppressionList")["Message"])) + complaints = Complaint.query.all() + + assert len(complaints) == 1 + assert complaints[0].notification_id == notification.id + assert get_notification_by_id(notification.id).status == "permanent-failure" + + +def test_regular_complaint_does_not_update_notification_status(sample_email_template): + notification = save_notification(create_notification(template=sample_email_template, reference="ref1")) + status = get_notification_by_id(notification.id).status + + handle_complaint(json.loads(ses_complaint_callback_with_missing_complaint_type()["Message"])) + complaints = Complaint.query.all() + + assert len(complaints) == 1 + assert complaints[0].notification_id == notification.id + assert get_notification_by_id(notification.id).status == status + + +class TestBounceRates: + @pytest.mark.parametrize( + "bounceType, bounceSubType, expected_bounce_classification", + [ + ( + "Undetermined", + "Undetermined", + { + "feedback_type": NOTIFICATION_SOFT_BOUNCE, + "feedback_subtype": NOTIFICATION_SOFT_GENERAL, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ( + "Permanent", + "General", + { + "feedback_type": NOTIFICATION_HARD_BOUNCE, + "feedback_subtype": NOTIFICATION_HARD_GENERAL, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ( + "Permanent", + "NoEmail", + { + "feedback_type": NOTIFICATION_HARD_BOUNCE, + "feedback_subtype": NOTIFICATION_HARD_NOEMAIL, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ( + "Permanent", + "Suppressed", + { + "feedback_type": NOTIFICATION_HARD_BOUNCE, + "feedback_subtype": NOTIFICATION_HARD_SUPPRESSED, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ( + "Permanent", + "OnAccountSuppressionList", + { + "feedback_type": NOTIFICATION_HARD_BOUNCE, + "feedback_subtype": NOTIFICATION_HARD_ONACCOUNTSUPPRESSIONLIST, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ( + "Transient", + "General", + { + "feedback_type": NOTIFICATION_SOFT_BOUNCE, + "feedback_subtype": NOTIFICATION_SOFT_GENERAL, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ( + "Transient", + "MailboxFull", + { + "feedback_type": NOTIFICATION_SOFT_BOUNCE, + "feedback_subtype": NOTIFICATION_SOFT_MAILBOXFULL, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ( + "Transient", + "MessageTooLarge", + { + "feedback_type": NOTIFICATION_SOFT_BOUNCE, + "feedback_subtype": NOTIFICATION_SOFT_MESSAGETOOLARGE, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ( + "Transient", + 
"ContentRejected", + { + "feedback_type": NOTIFICATION_SOFT_BOUNCE, + "feedback_subtype": NOTIFICATION_SOFT_CONTENTREJECTED, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ( + "Transient", + "AttachmentRejected", + { + "feedback_type": NOTIFICATION_SOFT_BOUNCE, + "feedback_subtype": NOTIFICATION_SOFT_ATTACHMENTREJECTED, + "ses_feedback_id": "0102015fc9e676fb-12341234-1234-1234-1234-9301e86a4fa8-000000", + "ses_feedback_date": "2017-11-17T12:14:05.131Z", + }, + ), + ], + ) + def test_bounce_types(self, notify_api, bounceType, bounceSubType, expected_bounce_classification): + if bounceType == "Permanent": + bounce_message = json.loads(ses_hard_bounce_callback(reference="ref", bounce_subtype=bounceSubType)["Message"]) + elif bounceType == "Transient" or bounceType == "Undetermined": + bounce_message = json.loads(ses_soft_bounce_callback(reference="ref", bounce_subtype=bounceSubType)["Message"]) + if bounceType == "Undetermined": + bounce_message["bounce"]["bounceType"] = "Undetermined" + + with notify_api.test_request_context(): + # test = get_aws_responses(bounce_message)["bounce_response"] + assert get_aws_responses(bounce_message)["bounce_response"] == expected_bounce_classification diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index badcb878cd..eaa0c00da0 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -1,5 +1,6 @@ import datetime import uuid +from unittest.mock import call import pytest from boto3.exceptions import Boto3Error @@ -10,608 +11,1254 @@ ) from sqlalchemy.exc import SQLAlchemyError +from app.celery.utils import CeleryParams +from app.config import QueueNames +from app.dao.service_sms_sender_dao import dao_update_service_sms_sender from app.models import ( + BULK, LETTER_TYPE, + NORMAL, + PRIORITY, + ApiKey, Notification, NotificationHistory, ScheduledNotification, Template, ) from app.notifications.process_notifications import ( + choose_queue, create_content_for_notification, + db_save_and_send_notification, persist_notification, + persist_notifications, persist_scheduled_notification, send_notification_to_queue, simulated_recipient, + transform_notification, ) from app.v2.errors import BadRequestError -from tests.app.conftest import sample_api_key as create_api_key -from tests.app.db import create_service, create_template - - -def test_create_content_for_notification_passes(sample_email_template): - template = Template.query.get(sample_email_template.id) - content = create_content_for_notification(template, None) - assert str(content) == template.content - - -def test_create_content_for_notification_with_placeholders_passes( - sample_template_with_placeholders, -): - template = Template.query.get(sample_template_with_placeholders.id) - content = create_content_for_notification(template, {"name": "Bobby"}) - assert content.content == template.content - assert "Bobby" in str(content) - - -def test_create_content_for_notification_fails_with_missing_personalisation( - sample_template_with_placeholders, -): - template = Template.query.get(sample_template_with_placeholders.id) - with pytest.raises(BadRequestError): - create_content_for_notification(template, None) - - -def test_create_content_for_notification_allows_additional_personalisation( - sample_template_with_placeholders, -): - template = 
Template.query.get(sample_template_with_placeholders.id) - create_content_for_notification(template, {"name": "Bobby", "Additional placeholder": "Data"}) - - -@freeze_time("2016-01-01 11:09:00.061258") -def test_persist_notification_creates_and_save_to_db(sample_template, sample_api_key, sample_job, mocker): - mocked_redis = mocker.patch("app.notifications.process_notifications.redis_store.get") - - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 - notification = persist_notification( - template_id=sample_template.id, - template_version=sample_template.version, - recipient="+16502532222", - service=sample_template.service, - personalisation={}, - notification_type="sms", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - job_id=sample_job.id, - job_row_number=100, - reference="ref", - reply_to_text=sample_template.service.get_default_sms_sender(), - ) - - assert Notification.query.get(notification.id) is not None - - notification_from_db = Notification.query.one() - - assert notification_from_db.id == notification.id - assert notification_from_db.template_id == notification.template_id - assert notification_from_db.template_version == notification.template_version - assert notification_from_db.api_key_id == notification.api_key_id - assert notification_from_db.key_type == notification.key_type - assert notification_from_db.key_type == notification.key_type - assert notification_from_db.billable_units == notification.billable_units - assert notification_from_db.notification_type == notification.notification_type - assert notification_from_db.created_at == notification.created_at - assert not notification_from_db.sent_at - assert notification_from_db.updated_at == notification.updated_at - assert notification_from_db.status == notification.status - assert notification_from_db.reference == notification.reference - assert notification_from_db.client_reference == notification.client_reference - assert notification_from_db.created_by_id == notification.created_by_id - assert notification_from_db.reply_to_text == sample_template.service.get_default_sms_sender() - - mocked_redis.assert_called_once_with(str(sample_template.service_id) + "-2016-01-01-count") - - -def test_persist_notification_throws_exception_when_missing_template(sample_api_key): - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 - with pytest.raises(SQLAlchemyError): - persist_notification( - template_id=None, - template_version=None, - recipient="+16502532222", - service=sample_api_key.service, - personalisation=None, - notification_type="sms", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, +from tests.app.conftest import create_sample_api_key +from tests.app.db import create_service, create_service_sms_sender, create_template +from tests.conftest import set_config + + +class TestContentCreation: + def test_create_content_for_notification_passes(self, sample_email_template): + template = Template.query.get(sample_email_template.id) + content = create_content_for_notification(template, None) + assert str(content) == template.content + + def test_create_content_for_notification_with_placeholders_passes( + self, + sample_template_with_placeholders, + ): + template = Template.query.get(sample_template_with_placeholders.id) + content = create_content_for_notification(template, {"name": "Bobby"}) + assert content.content == template.content + assert "Bobby" in str(content) + + def 
test_create_content_for_notification_fails_with_missing_personalisation( + self, + sample_template_with_placeholders, + ): + template = Template.query.get(sample_template_with_placeholders.id) + with pytest.raises(BadRequestError): + create_content_for_notification(template, None) + + def test_create_content_for_notification_allows_additional_personalisation( + self, + sample_template_with_placeholders, + ): + template = Template.query.get(sample_template_with_placeholders.id) + create_content_for_notification(template, {"name": "Bobby", "Additional placeholder": "Data"}) + + +class TestPersistNotification: + def test_persists_notification_throws_exception_when_missing_template(self, sample_api_key): + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + with pytest.raises(SQLAlchemyError): + persist_notifications( + [ + dict( + template_id=None, + template_version=None, + recipient="+16502532222", + service=sample_api_key.service, + personalisation=None, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + ) + ] + ) + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + + def test_persist_notifications_does_not_increment_cache_if_test_key( + self, notify_db, notify_db_session, sample_template, sample_job, mocker + ): + api_key = create_sample_api_key( + notify_db=notify_db, + notify_db_session=notify_db_session, + service=sample_template.service, + key_type="test", ) - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 - - -def test_cache_is_not_incremented_on_failure_to_persist_notification(sample_api_key, mocker): - mocked_redis = mocker.patch("app.redis_store.get") - mock_service_template_cache = mocker.patch("app.redis_store.get_all_from_hash") - with pytest.raises(SQLAlchemyError): - persist_notification( - template_id=None, - template_version=None, - recipient="+16502532222", - service=sample_api_key.service, - personalisation=None, - notification_type="sms", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, + mocker.patch("app.notifications.process_notifications.redis_store.get", return_value="cache") + mocker.patch( + "app.notifications.process_notifications.redis_store.get_all_from_hash", + return_value="cache", + ) + daily_limit_cache = mocker.patch("app.notifications.process_notifications.redis_store.incr") + template_usage_cache = mocker.patch("app.notifications.process_notifications.redis_store.increment_hash_value") + mocker.patch("app.notifications.process_notifications.dao_get_template_by_id", return_value=sample_template) + mocker.patch("app.notifications.process_notifications.dao_fetch_service_by_id", return_value=sample_template.service) + mocker.patch("app.notifications.process_notifications.choose_queue", return_value="sms_normal_queue") + + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + persist_notifications( + [ + dict( + template_id=sample_template.id, + template_version=sample_template.version, + recipient="+16502532222", + service=sample_template.service, + personalisation={}, + notification_type="sms", + api_key_id=api_key.id, + key_type=api_key.key_type, + job_id=sample_job.id, + job_row_number=100, + reference="ref", + ) + ] + ) + assert Notification.query.count() == 1 + assert not daily_limit_cache.called + assert not template_usage_cache.called + + @freeze_time("2016-01-01 11:09:00.061258") + def test_persist_notifications_with_optionals(self, 
client, sample_job, sample_api_key, mocker, sample_template): + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + + mocked_redis = mocker.patch("app.notifications.process_notifications.redis_store.get") + mocker.patch("app.notifications.process_notifications.dao_get_template_by_id", return_value=sample_template) + mocker.patch("app.notifications.process_notifications.dao_fetch_service_by_id", return_value=sample_template.service) + mocker.patch("app.notifications.process_notifications.choose_queue", return_value="sms_normal_queue") + n_id = uuid.uuid4() + created_at = datetime.datetime(2016, 11, 11, 16, 8, 18) + + persist_notifications( + [ + dict( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient="+16502532222", + service=sample_job.service, + personalisation=None, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + created_at=created_at, + job_id=sample_job.id, + job_row_number=10, + client_reference="ref from client", + notification_id=n_id, + created_by_id=sample_job.created_by_id, + ) + ] + ) + assert Notification.query.count() == 1 + assert NotificationHistory.query.count() == 0 + persisted_notification = Notification.query.all()[0] + assert persisted_notification.id == n_id + assert persisted_notification.job_id == sample_job.id + assert persisted_notification.job_row_number == 10 + assert persisted_notification.created_at == created_at + assert persisted_notification.client_reference == "ref from client" + assert persisted_notification.reference is None + assert persisted_notification.international is False + assert persisted_notification.phone_prefix == "1" + assert persisted_notification.rate_multiplier == 1 + assert persisted_notification.created_by_id == sample_job.created_by_id + assert not persisted_notification.reply_to_text + + expected_redis_calls = [ + call(str(sample_job.service_id) + "-2016-01-01-count"), + ] + assert mocked_redis.call_count == len(expected_redis_calls) + assert mocked_redis.call_args_list == expected_redis_calls + + @freeze_time("2016-01-01 11:09:00.061258") + def test_persist_notifications_doesnt_touch_cache_for_old_keys_that_dont_exist(self, sample_template, sample_api_key, mocker): + mock_incr = mocker.patch("app.notifications.process_notifications.redis_store.incr") + mocker.patch("app.notifications.process_notifications.redis_store.get", return_value=None) + mocker.patch( + "app.notifications.process_notifications.redis_store.get_all_from_hash", + return_value=None, + ) + mocker.patch("app.notifications.process_notifications.dao_get_template_by_id", return_value=sample_template) + mocker.patch("app.notifications.process_notifications.dao_fetch_service_by_id", return_value=sample_template.service) + persist_notifications( + [ + dict( + template_id=sample_template.id, + template_version=sample_template.version, + recipient="+16502532222", + service=sample_template.service, + personalisation={}, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + reference="ref", + ) + ] + ) + mock_incr.assert_not_called() + + @freeze_time("2016-01-01 11:09:00.061258") + def test_persist_notifications_increments_cache_if_key_exists(self, sample_template, sample_api_key, mocker): + mock_incr = mocker.patch("app.notifications.process_notifications.redis_store.incr") + mocker.patch("app.notifications.process_notifications.redis_store.get", return_value=1) + mocker.patch( +
"app.notifications.process_notifications.redis_store.get_all_from_hash", + return_value={sample_template.id, 1}, + ) + mocker.patch("app.notifications.process_notifications.dao_get_template_by_id", return_value=sample_template) + mocker.patch("app.notifications.process_notifications.dao_fetch_service_by_id", return_value=sample_template.service) + mocker.patch("app.notifications.process_notifications.choose_queue", return_value="sms_normal_queue") + + persist_notifications( + [ + dict( + template_id=sample_template.id, + template_version=sample_template.version, + recipient="+16502532222", + service=sample_template.service, + personalisation={}, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + reference="ref2", + ) + ] ) - mocked_redis.assert_not_called() - mock_service_template_cache.assert_not_called() + mock_incr.assert_called_once_with( + str(sample_template.service_id) + "-2016-01-01-count", + ) -def test_persist_notification_does_not_increment_cache_if_test_key( - notify_db, notify_db_session, sample_template, sample_job, mocker -): - api_key = create_api_key( - notify_db=notify_db, - notify_db_session=notify_db_session, - service=sample_template.service, - key_type="test", - ) - mocker.patch("app.notifications.process_notifications.redis_store.get", return_value="cache") - mocker.patch( - "app.notifications.process_notifications.redis_store.get_all_from_hash", - return_value="cache", + @pytest.mark.parametrize( + "recipient, expected_international, expected_prefix, expected_units", + [ + ("6502532222", False, "1", 1), # NA + ("+16502532222", False, "1", 1), # NA + ("+79587714230", True, "7", 1), # Russia + ("+360623400400", True, "36", 3), + ], # Hungary ) - daily_limit_cache = mocker.patch("app.notifications.process_notifications.redis_store.incr") - template_usage_cache = mocker.patch("app.notifications.process_notifications.redis_store.increment_hash_value") - - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 - persist_notification( - template_id=sample_template.id, - template_version=sample_template.version, - recipient="+16502532222", - service=sample_template.service, - personalisation={}, - notification_type="sms", - api_key_id=api_key.id, - key_type=api_key.key_type, - job_id=sample_job.id, - job_row_number=100, - reference="ref", + def test_persist_notifications_with_international_info_stores_correct_info( + self, + sample_job, + sample_api_key, + mocker, + recipient, + expected_international, + expected_prefix, + expected_units, + ): + persist_notifications( + [ + dict( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient=recipient, + service=sample_job.service, + personalisation=None, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + job_row_number=10, + client_reference="ref from client", + ) + ] + ) + persisted_notification = Notification.query.all()[0] + + assert persisted_notification.international is expected_international + assert persisted_notification.phone_prefix == expected_prefix + assert persisted_notification.rate_multiplier == expected_units + + def test_persist_notification_with_international_info_does_not_store_for_email(self, sample_job, sample_api_key, mocker): + persist_notifications( + [ + dict( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient="foo@bar.com", + service=sample_job.service, + 
personalisation=None, + notification_type="email", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + job_row_number=10, + client_reference="ref from client", + ) + ] + ) + persisted_notification = Notification.query.all()[0] + + assert persisted_notification.international is False + assert persisted_notification.phone_prefix is None + assert persisted_notification.rate_multiplier is None + + @pytest.mark.parametrize( + "recipient, expected_recipient_normalised", + [ + ("6502532222", "+16502532222"), + (" 6502532223", "+16502532223"), + ("6502532223", "+16502532223"), + ], ) + def test_persist_sms_notifications_stores_normalised_number( + self, sample_job, sample_api_key, mocker, recipient, expected_recipient_normalised + ): + persist_notifications( + [ + dict( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient=recipient, + service=sample_job.service, + personalisation=None, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + ) + ] + ) + persisted_notification = Notification.query.all()[0] + + assert persisted_notification.to == recipient + assert persisted_notification.normalised_to == expected_recipient_normalised - assert Notification.query.count() == 1 - - assert not daily_limit_cache.called - assert not template_usage_cache.called - - -@freeze_time("2016-01-01 11:09:00.061258") -def test_persist_notification_with_optionals(sample_job, sample_api_key, mocker): - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 - mocked_redis = mocker.patch("app.notifications.process_notifications.redis_store.get") - n_id = uuid.uuid4() - created_at = datetime.datetime(2016, 11, 11, 16, 8, 18) - persist_notification( - template_id=sample_job.template.id, - template_version=sample_job.template.version, - recipient="+16502532222", - service=sample_job.service, - personalisation=None, - notification_type="sms", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - created_at=created_at, - job_id=sample_job.id, - job_row_number=10, - client_reference="ref from client", - notification_id=n_id, - created_by_id=sample_job.created_by_id, + @pytest.mark.parametrize( + "recipient, expected_recipient_normalised", + [("FOO@bar.com", "foo@bar.com"), ("BAR@foo.com", "bar@foo.com")], ) - assert Notification.query.count() == 1 - assert NotificationHistory.query.count() == 0 - persisted_notification = Notification.query.all()[0] - assert persisted_notification.id == n_id - persisted_notification.job_id == sample_job.id - assert persisted_notification.job_row_number == 10 - assert persisted_notification.created_at == created_at - mocked_redis.assert_called_once_with(str(sample_job.service_id) + "-2016-01-01-count") - assert persisted_notification.client_reference == "ref from client" - assert persisted_notification.reference is None - assert persisted_notification.international is False - assert persisted_notification.phone_prefix == "1" - assert persisted_notification.rate_multiplier == 1 - assert persisted_notification.created_by_id == sample_job.created_by_id - assert not persisted_notification.reply_to_text - - -@freeze_time("2016-01-01 11:09:00.061258") -def test_persist_notification_doesnt_touch_cache_for_old_keys_that_dont_exist(sample_template, sample_api_key, mocker): - mock_incr = mocker.patch("app.notifications.process_notifications.redis_store.incr") - 
mocker.patch("app.notifications.process_notifications.redis_store.get", return_value=None) - mocker.patch( - "app.notifications.process_notifications.redis_store.get_all_from_hash", - return_value=None, + def test_persist_email_notifications_stores_normalised_email( + self, sample_job, sample_api_key, mocker, recipient, expected_recipient_normalised + ): + persist_notifications( + [ + dict( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient=recipient, + service=sample_job.service, + personalisation=None, + notification_type="email", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + ) + ] + ) + persisted_notification = Notification.query.all()[0] + + assert persisted_notification.to == recipient + assert persisted_notification.normalised_to == expected_recipient_normalised + + def test_persist_notification_with_billable_units_stores_correct_info(self, mocker, notify_db_session): + service = create_service(service_permissions=[LETTER_TYPE]) + template = create_template(service, template_type=LETTER_TYPE) + mocker.patch("app.dao.templates_dao.dao_get_template_by_id", return_value=template) + persist_notifications( + [ + dict( + template_id=template.id, + template_version=template.version, + recipient="123 Main Street", + service=template.service, + personalisation=None, + notification_type=template.template_type, + api_key_id=None, + key_type="normal", + billable_units=3, + template_postage=template.postage, + ) + ] + ) + persisted_notification = Notification.query.all()[0] + assert persisted_notification.billable_units == 3 + + def test_persist_notifications_list(self, sample_job, sample_api_key, notify_db_session): + persist_notifications( + [ + dict( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient="foo@bar.com", + service=sample_job.service, + personalisation=None, + notification_type="email", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + job_row_number=10, + client_reference="ref from client", + ), + dict( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient="foo2@bar.com", + service=sample_job.service, + personalisation=None, + notification_type="email", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + job_row_number=10, + client_reference="ref from client", + ), + ] + ) + persisted_notification = Notification.query.all() + + assert persisted_notification[0].to == "foo@bar.com" + assert persisted_notification[1].to == "foo2@bar.com" + assert persisted_notification[0].service == sample_job.service + + # Test that the api key last_used_timestamp got updated + api_key = ApiKey.query.get(sample_api_key.id) + assert api_key.last_used_timestamp is not None + + def test_persist_notifications_reply_to_text_is_original_value_if_sender_is_changed_later( + self, sample_template, sample_api_key, mocker + ): + mocker.patch("app.notifications.process_notifications.redis_store.incr") + mocker.patch("app.notifications.process_notifications.redis_store.get", return_value=1) + mocker.patch( + "app.notifications.process_notifications.redis_store.get_all_from_hash", + return_value={sample_template.id, 1}, + ) + mocker.patch("app.notifications.process_notifications.dao_get_template_by_id", return_value=sample_template) + mocker.patch("app.notifications.process_notifications.dao_fetch_service_by_id", 
return_value=sample_template.service) + mocker.patch("app.notifications.process_notifications.choose_queue", return_value="sms_normal_queue") + + sms_sender = create_service_sms_sender(service=sample_template.service, sms_sender="123456", is_default=False) + persist_notifications( + [ + dict( + template_id=sample_template.id, + template_version=sample_template.version, + recipient="+16502532222", + service=sample_template.service, + personalisation={}, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + reference="ref2", + reply_to_text=sms_sender.sms_sender, + ) + ] + ) + persisted_notification = Notification.query.all()[0] + assert persisted_notification.reply_to_text == "123456" + + dao_update_service_sms_sender( + service_id=sample_template.service_id, + service_sms_sender_id=sms_sender.id, + is_default=sms_sender.is_default, + sms_sender="updated", + ) + persisted_notification = Notification.query.all()[0] + assert persisted_notification.reply_to_text == "123456" + + +class TestSendNotificationQueue: + @pytest.mark.parametrize( + ("research_mode, requested_queue, notification_type, key_type, reply_to_text, expected_queue, expected_task"), + [ + (True, None, "sms", "normal", None, "research-mode-tasks", "deliver_sms"), + (True, None, "email", "normal", None, "research-mode-tasks", "deliver_email"), + (True, None, "email", "team", None, "research-mode-tasks", "deliver_email"), + ( + True, + None, + "letter", + "normal", + None, + "research-mode-tasks", + "letters_pdf_tasks.create_letters_pdf", + ), + ( + True, + None, + "sms", + "normal", + "+14383898585", + "send-throttled-sms-tasks", + "deliver_throttled_sms", + ), + (False, None, "sms", "normal", None, QueueNames.SEND_SMS_MEDIUM, "deliver_sms"), + (False, None, "email", "normal", None, QueueNames.SEND_EMAIL_MEDIUM, "deliver_email"), + (False, None, "sms", "team", None, QueueNames.SEND_SMS_MEDIUM, "deliver_sms"), + ( + False, + None, + "letter", + "normal", + None, + "create-letters-pdf-tasks", + "letters_pdf_tasks.create_letters_pdf", + ), + (False, None, "sms", "test", None, "research-mode-tasks", "deliver_sms"), + ( + False, + None, + "sms", + "normal", + "+14383898585", + "send-throttled-sms-tasks", + "deliver_throttled_sms", + ), + ( + True, + "notify-internal-tasks", + "email", + "normal", + None, + "research-mode-tasks", + "deliver_email", + ), + ( + False, + "notify-internal-tasks", + "sms", + "normal", + None, + "notify-internal-tasks", + "deliver_sms", + ), + ( + False, + "notify-internal-tasks", + "email", + "normal", + None, + "notify-internal-tasks", + "deliver_email", + ), + ( + False, + "notify-internal-tasks", + "sms", + "test", + None, + "research-mode-tasks", + "deliver_sms", + ), + ( + False, + "notify-internal-tasks", + "sms", + "normal", + "+14383898585", + "send-throttled-sms-tasks", + "deliver_throttled_sms", + ), + ], ) + def test_send_notification_to_queue( + self, + notify_db, + notify_db_session, + research_mode, + requested_queue, + notification_type, + key_type, + reply_to_text, + expected_queue, + expected_task, + mocker, + ): + if "." 
not in expected_task: + expected_task = f"provider_tasks.{expected_task}" + mocked = mocker.patch(f"app.celery.{expected_task}.apply_async") + notification = Notification( + id=uuid.uuid4(), + key_type=key_type, + notification_type=notification_type, + created_at=datetime.datetime(2016, 11, 11, 16, 8, 18), + reply_to_text=reply_to_text, + ) - persist_notification( - template_id=sample_template.id, - template_version=sample_template.version, - recipient="+16502532222", - service=sample_template.service, - personalisation={}, - notification_type="sms", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - reference="ref", - ) - mock_incr.assert_not_called() + send_notification_to_queue(notification=notification, research_mode=research_mode, queue=requested_queue) + mocked.assert_called_once_with([str(notification.id)], queue=expected_queue) -@freeze_time("2016-01-01 11:09:00.061258") -def test_persist_notification_increments_cache_if_key_exists(sample_template, sample_api_key, mocker): - mock_incr = mocker.patch("app.notifications.process_notifications.redis_store.incr") - mocker.patch("app.notifications.process_notifications.redis_store.get", return_value=1) - mocker.patch( - "app.notifications.process_notifications.redis_store.get_all_from_hash", - return_value={sample_template.id, 1}, + def test_send_notification_to_queue_throws_exception_deletes_notification(self, sample_notification, mocker): + mocked = mocker.patch( + "app.celery.provider_tasks.deliver_sms.apply_async", + side_effect=Boto3Error("EXPECTED"), + ) + with pytest.raises(Boto3Error): + send_notification_to_queue(sample_notification, False) + mocked.assert_called_once_with([(str(sample_notification.id))], queue=QueueNames.SEND_SMS_MEDIUM) + + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + + +class TestSimulatedRecipient: + @pytest.mark.parametrize( + "to_address, notification_type, expected", + [ + ("+16132532222", "sms", True), + ("+16132532223", "sms", True), + ("6132532222", "sms", True), + ("simulate-delivered@notification.canada.ca", "email", True), + ("simulate-delivered-2@notification.canada.ca", "email", True), + ("simulate-delivered-3@notification.canada.ca", "email", True), + ("6132532225", "sms", False), + ("valid_email@test.com", "email", False), + ], ) + def test_simulated_recipient(self, notify_api, to_address, notification_type, expected): + """ + The values where the expected = 'research-mode' are listed in the config['SIMULATED_EMAIL_ADDRESSES'] + and config['SIMULATED_SMS_NUMBERS']. These values should result in using the research mode queue. 
+ SIMULATED_EMAIL_ADDRESSES = ( + 'simulate-delivered@notification.canada.ca', + 'simulate-delivered-2@notification.canada.ca', + 'simulate-delivered-2@notification.canada.ca' + ) + SIMULATED_SMS_NUMBERS = ('6132532222', '+16132532222', '+16132532223') + """ + formatted_address = None - persist_notification( - template_id=sample_template.id, - template_version=sample_template.version, - recipient="+16502532222", - service=sample_template.service, - personalisation={}, - notification_type="sms", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - reference="ref2", - ) + if notification_type == "email": + formatted_address = validate_and_format_email_address(to_address) + else: + formatted_address = validate_and_format_phone_number(to_address) - mock_incr.assert_called_once_with( - str(sample_template.service_id) + "-2016-01-01-count", - ) + is_simulated_address = simulated_recipient(formatted_address, notification_type) + assert is_simulated_address == expected -@pytest.mark.parametrize( - ("research_mode, requested_queue, notification_type, key_type, reply_to_text, expected_queue, expected_task"), - [ - (True, None, "sms", "normal", None, "research-mode-tasks", "deliver_sms"), - (True, None, "email", "normal", None, "research-mode-tasks", "deliver_email"), - (True, None, "email", "team", None, "research-mode-tasks", "deliver_email"), - ( - True, - None, - "letter", - "normal", - None, - "research-mode-tasks", - "letters_pdf_tasks.create_letters_pdf", - ), - ( - True, - None, - "sms", - "normal", - "+14383898585", - "send-throttled-sms-tasks", - "deliver_throttled_sms", - ), - (False, None, "sms", "normal", None, "send-sms-tasks", "deliver_sms"), - (False, None, "email", "normal", None, "send-email-tasks", "deliver_email"), - (False, None, "sms", "team", None, "send-sms-tasks", "deliver_sms"), - ( - False, - None, - "letter", - "normal", - None, - "create-letters-pdf-tasks", - "letters_pdf_tasks.create_letters_pdf", - ), - (False, None, "sms", "test", None, "research-mode-tasks", "deliver_sms"), - ( - False, - None, - "sms", - "normal", - "+14383898585", - "send-throttled-sms-tasks", - "deliver_throttled_sms", - ), - ( - True, - "notify-internal-tasks", - "email", - "normal", - None, - "research-mode-tasks", - "deliver_email", - ), - ( - False, - "notify-internal-tasks", - "sms", - "normal", - None, - "notify-internal-tasks", - "deliver_sms", - ), - ( - False, - "notify-internal-tasks", - "email", - "normal", - None, - "notify-internal-tasks", - "deliver_email", - ), - ( - False, - "notify-internal-tasks", - "sms", - "test", - None, - "research-mode-tasks", - "deliver_sms", - ), - ( - False, - "notify-internal-tasks", - "sms", - "normal", - "+14383898585", - "send-throttled-sms-tasks", - "deliver_throttled_sms", - ), - ], -) -def test_send_notification_to_queue( - notify_db, - notify_db_session, - research_mode, - requested_queue, - notification_type, - key_type, - reply_to_text, - expected_queue, - expected_task, - mocker, -): - if "." 
not in expected_task: - expected_task = f"provider_tasks.{expected_task}" - mocked = mocker.patch(f"app.celery.{expected_task}.apply_async") - notification = Notification( - id=uuid.uuid4(), - key_type=key_type, - notification_type=notification_type, - created_at=datetime.datetime(2016, 11, 11, 16, 8, 18), - reply_to_text=reply_to_text, + +# This test assumes the local timezone is EST +class TestScheduledNotification: + def test_persist_scheduled_notification(self, sample_notification): + persist_scheduled_notification(sample_notification.id, "2017-05-12 14:15") + scheduled_notification = ScheduledNotification.query.all() + assert len(scheduled_notification) == 1 + assert scheduled_notification[0].notification_id == sample_notification.id + assert scheduled_notification[0].scheduled_for == datetime.datetime(2017, 5, 12, 18, 15) + + +class TestChooseQueue: + @pytest.mark.parametrize( + ("research_mode, requested_queue, notification_type, key_type, reply_to_text, expected_queue"), + [ + (True, None, "sms", "normal", None, "research-mode-tasks"), + (True, None, "email", "normal", None, "research-mode-tasks"), + (True, None, "email", "team", None, "research-mode-tasks"), + ( + True, + None, + "letter", + "normal", + None, + "research-mode-tasks", + ), + ( + True, + None, + "sms", + "normal", + "+14383898585", + "send-throttled-sms-tasks", + ), + (False, None, "sms", "normal", None, QueueNames.SEND_SMS_MEDIUM), + (False, None, "email", "normal", None, QueueNames.SEND_EMAIL_MEDIUM), + (False, None, "sms", "team", None, QueueNames.SEND_SMS_MEDIUM), + ( + False, + None, + "letter", + "normal", + None, + "create-letters-pdf-tasks", + ), + (False, None, "sms", "test", None, "research-mode-tasks"), + ( + False, + None, + "sms", + "normal", + "+14383898585", + "send-throttled-sms-tasks", + ), + ( + True, + "notify-internal-tasks", + "email", + "normal", + None, + "research-mode-tasks", + ), + ( + False, + "notify-internal-tasks", + "sms", + "normal", + None, + "notify-internal-tasks", + ), + ( + False, + "notify-internal-tasks", + "email", + "normal", + None, + "notify-internal-tasks", + ), + ( + False, + "notify-internal-tasks", + "sms", + "test", + None, + "research-mode-tasks", + ), + ( + False, + "notify-internal-tasks", + "sms", + "normal", + "+14383898585", + "send-throttled-sms-tasks", + ), + ], ) + def test_choose_queue( + self, + sample_template, + sample_api_key, + sample_job, + research_mode, + requested_queue, + notification_type, + key_type, + reply_to_text, + expected_queue, + ): + notification = Notification( + id=uuid.uuid4(), + template_id=sample_template.id, + template_version=sample_template.version, + service=sample_template.service, + personalisation={}, + notification_type=notification_type, + api_key_id=sample_api_key.id, + key_type=key_type, + job_id=sample_job.id, + job_row_number=100, + reference="ref", + reply_to_text=reply_to_text, + to="+16502532222", + created_at=datetime.datetime(2016, 11, 11, 16, 8, 18), + ) - send_notification_to_queue(notification=notification, research_mode=research_mode, queue=requested_queue) + assert choose_queue(notification, research_mode, requested_queue) == expected_queue - mocked.assert_called_once_with([str(notification.id)], queue=expected_queue) +class TestTransformNotification: + def test_transform_notification_with_optionals(self, sample_job, sample_api_key, notify_db_session): + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 -def 
test_send_notification_to_queue_throws_exception_deletes_notification(sample_notification, mocker): - mocked = mocker.patch( - "app.celery.provider_tasks.deliver_sms.apply_async", - side_effect=Boto3Error("EXPECTED"), - ) - with pytest.raises(Boto3Error): - send_notification_to_queue(sample_notification, False) - mocked.assert_called_once_with([(str(sample_notification.id))], queue="send-sms-tasks") - - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 - - -@pytest.mark.parametrize( - "to_address, notification_type, expected", - [ - ("+16132532222", "sms", True), - ("+16132532223", "sms", True), - ("6132532222", "sms", True), - ("simulate-delivered@notification.canada.ca", "email", True), - ("simulate-delivered-2@notification.canada.ca", "email", True), - ("simulate-delivered-3@notification.canada.ca", "email", True), - ("6132532225", "sms", False), - ("valid_email@test.com", "email", False), - ], -) -def test_simulated_recipient(notify_api, to_address, notification_type, expected): - """ - The values where the expected = 'research-mode' are listed in the config['SIMULATED_EMAIL_ADDRESSES'] - and config['SIMULATED_SMS_NUMBERS']. These values should result in using the research mode queue. - SIMULATED_EMAIL_ADDRESSES = ( - 'simulate-delivered@notification.canada.ca', - 'simulate-delivered-2@notification.canada.ca', - 'simulate-delivered-2@notification.canada.ca' + n_id = uuid.uuid4() + created_at = datetime.datetime(2016, 11, 11, 16, 8, 18) + notification = transform_notification( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient="+16502532222", + service=sample_job.service, + personalisation=None, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + created_at=created_at, + job_id=sample_job.id, + job_row_number=10, + client_reference="ref from client", + notification_id=n_id, + created_by_id=sample_job.created_by_id, + ) + assert notification.id == n_id + assert notification.job_id == sample_job.id + assert notification.job_row_number == 10 + assert notification.created_at == created_at + assert notification.client_reference == "ref from client" + assert notification.reference is None + assert notification.international is False + assert notification.phone_prefix == "1" + assert notification.rate_multiplier == 1 + assert notification.created_by_id == sample_job.created_by_id + assert not notification.reply_to_text + + @pytest.mark.parametrize( + "recipient, expected_international, expected_prefix, expected_units", + [ + ("6502532222", False, "1", 1), # NA + ("+16502532222", False, "1", 1), # NA + ("+79587714230", True, "7", 1), # Russia + ("+360623400400", True, "36", 3), + ], # Hungary ) - SIMULATED_SMS_NUMBERS = ('6132532222', '+16132532222', '+16132532223') - """ - formatted_address = None - - if notification_type == "email": - formatted_address = validate_and_format_email_address(to_address) - else: - formatted_address = validate_and_format_phone_number(to_address) + def test_transform_notification_with_international_info_stores_correct_info( + self, + sample_job, + sample_api_key, + mocker, + recipient, + expected_international, + expected_prefix, + expected_units, + ): + notification = transform_notification( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient=recipient, + service=sample_job.service, + personalisation=None, + notification_type="sms", + api_key_id=sample_api_key.id, + 
key_type=sample_api_key.key_type, + job_id=sample_job.id, + job_row_number=10, + client_reference="ref from client", + ) - is_simulated_address = simulated_recipient(formatted_address, notification_type) + assert notification.international is expected_international + assert notification.phone_prefix == expected_prefix + assert notification.rate_multiplier == expected_units - assert is_simulated_address == expected + def test_transform_notification_with_international_info_does_not_store_for_email(self, sample_job, sample_api_key, mocker): + notification = transform_notification( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient="foo@bar.com", + service=sample_job.service, + personalisation=None, + notification_type="email", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + job_row_number=10, + client_reference="ref from client", + ) + assert notification.international is False + assert notification.phone_prefix is None + assert notification.rate_multiplier is None + + @pytest.mark.parametrize( + "recipient, expected_recipient_normalised", + [ + ("6502532222", "+16502532222"), + (" 6502532223", "+16502532223"), + ("6502532223", "+16502532223"), + ], + ) + def test_transform_sms_notification_stores_normalised_number( + self, sample_job, sample_api_key, mocker, recipient, expected_recipient_normalised + ): + notification = transform_notification( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient=recipient, + service=sample_job.service, + personalisation=None, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + ) + assert notification.to == recipient + assert notification.normalised_to == expected_recipient_normalised -@pytest.mark.parametrize( - "recipient, expected_international, expected_prefix, expected_units", - [ - ("6502532222", False, "1", 1), # NA - ("+16502532222", False, "1", 1), # NA - ("+79587714230", True, "7", 1), # Russia - ("+360623400400", True, "36", 3), - ], # Hungary -) -def test_persist_notification_with_international_info_stores_correct_info( - sample_job, - sample_api_key, - mocker, - recipient, - expected_international, - expected_prefix, - expected_units, -): - persist_notification( - template_id=sample_job.template.id, - template_version=sample_job.template.version, - recipient=recipient, - service=sample_job.service, - personalisation=None, - notification_type="sms", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - job_id=sample_job.id, - job_row_number=10, - client_reference="ref from client", + @pytest.mark.parametrize( + "recipient, expected_recipient_normalised", + [("FOO@bar.com", "foo@bar.com"), ("BAR@foo.com", "bar@foo.com")], ) - persisted_notification = Notification.query.all()[0] - - assert persisted_notification.international is expected_international - assert persisted_notification.phone_prefix == expected_prefix - assert persisted_notification.rate_multiplier == expected_units - - -def test_persist_notification_with_international_info_does_not_store_for_email(sample_job, sample_api_key, mocker): - persist_notification( - template_id=sample_job.template.id, - template_version=sample_job.template.version, - recipient="foo@bar.com", - service=sample_job.service, - personalisation=None, - notification_type="email", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - job_id=sample_job.id, - job_row_number=10, - 
client_reference="ref from client", + def test_transform_email_notification_stores_normalised_email( + self, sample_job, sample_api_key, mocker, recipient, expected_recipient_normalised + ): + persist_notification( + template_id=sample_job.template.id, + template_version=sample_job.template.version, + recipient=recipient, + service=sample_job.service, + personalisation=None, + notification_type="email", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + ) + persisted_notification = Notification.query.all()[0] + + assert persisted_notification.to == recipient + assert persisted_notification.normalised_to == expected_recipient_normalised + api_key = ApiKey.query.get(sample_api_key.id) + assert api_key.last_used_timestamp is not None + + +class TestDBSaveAndSendNotification: + @freeze_time("2016-01-01 11:09:00.061258") + def test_db_save_and_send_notification_saves_to_db(self, client, sample_template, sample_api_key, sample_job, mocker): + mocked_redis = mocker.patch("app.notifications.process_notifications.redis_store.get") + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + + notification = Notification( + id=uuid.uuid4(), + template_id=sample_template.id, + template_version=sample_template.version, + service=sample_template.service, + personalisation={}, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + job_row_number=100, + reference="ref", + reply_to_text=sample_template.service.get_default_sms_sender(), + to="+16502532222", + created_at=datetime.datetime(2016, 11, 11, 16, 8, 18), + ) + db_save_and_send_notification(notification) + assert Notification.query.get(notification.id) is not None + + notification_from_db = Notification.query.one() + + assert notification_from_db.id == notification.id + assert notification_from_db.template_id == notification.template_id + assert notification_from_db.template_version == notification.template_version + assert notification_from_db.api_key_id == notification.api_key_id + assert notification_from_db.key_type == notification.key_type + assert notification_from_db.key_type == notification.key_type + assert notification_from_db.billable_units == notification.billable_units + assert notification_from_db.notification_type == notification.notification_type + assert notification_from_db.created_at == notification.created_at + assert not notification_from_db.sent_at + assert notification_from_db.updated_at == notification.updated_at + assert notification_from_db.status == notification.status + assert notification_from_db.reference == notification.reference + assert notification_from_db.client_reference == notification.client_reference + assert notification_from_db.created_by_id == notification.created_by_id + assert notification_from_db.reply_to_text == sample_template.service.get_default_sms_sender() + expected_redis_calls = [ + call(str(sample_job.service_id) + "-2016-01-01-count"), + ] + assert mocked_redis.call_count == len(expected_redis_calls) + assert mocked_redis.call_args_list == expected_redis_calls + + @pytest.mark.parametrize( + ("notification_type, key_type, reply_to_text, expected_queue, expected_task"), + [ + ("sms", "normal", None, "research-mode-tasks", "deliver_sms"), + ("email", "normal", None, "research-mode-tasks", "deliver_email"), + ("email", "team", None, "research-mode-tasks", "deliver_email"), + ( + "sms", + 
"normal", + "+14383898585", + "send-throttled-sms-tasks", + "deliver_throttled_sms", + ), + ("sms", "normal", None, QueueNames.SEND_SMS_MEDIUM, "deliver_sms"), + ("email", "normal", None, QueueNames.SEND_EMAIL_MEDIUM, "deliver_email"), + ("sms", "team", None, QueueNames.SEND_SMS_MEDIUM, "deliver_sms"), + ("sms", "test", None, "research-mode-tasks", "deliver_sms"), + ( + "sms", + "normal", + "+14383898585", + "send-throttled-sms-tasks", + "deliver_throttled_sms", + ), + ( + "email", + "normal", + None, + "research-mode-tasks", + "deliver_email", + ), + ( + "sms", + "normal", + None, + "notify-internal-tasks", + "deliver_sms", + ), + ( + "email", + "normal", + None, + "notify-internal-tasks", + "deliver_email", + ), + ( + "sms", + "test", + None, + "research-mode-tasks", + "deliver_sms", + ), + ( + "sms", + "normal", + "+14383898585", + "send-throttled-sms-tasks", + "deliver_throttled_sms", + ), + ], ) - persisted_notification = Notification.query.all()[0] + def test_db_save_and_send_notification_sends_to_queue( + self, + sample_template, + notify_db, + notify_db_session, + notification_type, + key_type, + reply_to_text, + expected_queue, + expected_task, + mocker, + ): + if "." not in expected_task: + expected_task = f"provider_tasks.{expected_task}" + mocked = mocker.patch(f"app.celery.{expected_task}.apply_async") + notification = Notification( + id=uuid.uuid4(), + to="joe@blow.com", + template_id=sample_template.id, + template_version=sample_template.version, + key_type=key_type, + notification_type=notification_type, + created_at=datetime.datetime(2016, 11, 11, 16, 8, 18), + reply_to_text=reply_to_text, + queue_name=expected_queue, + ) - assert persisted_notification.international is False - assert persisted_notification.phone_prefix is None - assert persisted_notification.rate_multiplier is None + db_save_and_send_notification(notification=notification) + mocked.assert_called_once_with([str(notification.id)], queue=expected_queue) -# This test assumes the local timezone is EST -def test_persist_scheduled_notification(sample_notification): - persist_scheduled_notification(sample_notification.id, "2017-05-12 14:15") - scheduled_notification = ScheduledNotification.query.all() - assert len(scheduled_notification) == 1 - assert scheduled_notification[0].notification_id == sample_notification.id - assert scheduled_notification[0].scheduled_for == datetime.datetime(2017, 5, 12, 18, 15) - - -@pytest.mark.parametrize( - "recipient, expected_recipient_normalised", - [ - ("6502532222", "+16502532222"), - (" 6502532223", "+16502532223"), - ("6502532223", "+16502532223"), - ], -) -def test_persist_sms_notification_stores_normalised_number( - sample_job, sample_api_key, mocker, recipient, expected_recipient_normalised -): - persist_notification( - template_id=sample_job.template.id, - template_version=sample_job.template.version, - recipient=recipient, - service=sample_job.service, - personalisation=None, - notification_type="sms", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - job_id=sample_job.id, - ) - persisted_notification = Notification.query.all()[0] + def test_db_save_and_send_notification_throws_exception_deletes_notification( + self, sample_template, sample_api_key, sample_job, mocker + ): + mocked = mocker.patch( + "app.celery.provider_tasks.deliver_sms.apply_async", + side_effect=Boto3Error("EXPECTED"), + ) + notification = Notification( + id=uuid.uuid4(), + template_id=sample_template.id, + template_version=sample_template.version, + 
service=sample_template.service, + personalisation={}, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + job_id=sample_job.id, + job_row_number=100, + reference="ref", + reply_to_text=sample_template.service.get_default_sms_sender(), + to="+16502532222", + created_at=datetime.datetime(2016, 11, 11, 16, 8, 18), + queue_name=QueueNames.SEND_SMS_MEDIUM, + ) - assert persisted_notification.to == recipient - assert persisted_notification.normalised_to == expected_recipient_normalised + with pytest.raises(Boto3Error): + db_save_and_send_notification(notification) + mocked.assert_called_once_with([(str(notification.id))], queue=QueueNames.SEND_SMS_MEDIUM) + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 -@pytest.mark.parametrize( - "recipient, expected_recipient_normalised", - [("FOO@bar.com", "foo@bar.com"), ("BAR@foo.com", "bar@foo.com")], -) -def test_persist_email_notification_stores_normalised_email( - sample_job, sample_api_key, mocker, recipient, expected_recipient_normalised -): - persist_notification( - template_id=sample_job.template.id, - template_version=sample_job.template.version, - recipient=recipient, - service=sample_job.service, - personalisation=None, - notification_type="email", - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - job_id=sample_job.id, + @pytest.mark.parametrize( + ("process_type, expected_retry_period"), + [ + (BULK, CeleryParams.RETRY_PERIODS[BULK]), + (NORMAL, CeleryParams.RETRY_PERIODS[NORMAL]), + (PRIORITY, CeleryParams.RETRY_PERIODS[PRIORITY]), + ], ) - persisted_notification = Notification.query.all()[0] + def test_retry_task_parameters(self, notify_api, process_type, expected_retry_period): + with notify_api.app_context(): + params = CeleryParams.retry(process_type) - assert persisted_notification.to == recipient - assert persisted_notification.normalised_to == expected_recipient_normalised + assert params["queue"] == QueueNames.RETRY + assert params["countdown"] == expected_retry_period - -@pytest.mark.parametrize( - "postage_argument, template_postage, expected_postage", - [ - ("second", "first", "second"), - ("first", "first", "first"), - ("first", "second", "first"), - (None, "second", "second"), - ], -) -def test_persist_letter_notification_finds_correct_postage( - mocker, - notify_db, - notify_db_session, - postage_argument, - template_postage, - expected_postage, -): - service = create_service(service_permissions=[LETTER_TYPE]) - api_key = create_api_key(notify_db, notify_db_session, service=service) - template = create_template(service, template_type=LETTER_TYPE, postage=template_postage) - mocker.patch("app.dao.templates_dao.dao_get_template_by_id", return_value=template) - persist_notification( - template_id=template.id, - template_version=template.version, - template_postage=template.postage, - recipient="Jane Doe, 10 Downing Street, London", - service=service, - personalisation=None, - notification_type=LETTER_TYPE, - api_key_id=api_key.id, - key_type=api_key.key_type, - postage=postage_argument, + @pytest.mark.parametrize( + ("process_type"), + [(BULK), (NORMAL), (PRIORITY), (None)], ) - persisted_notification = Notification.query.all()[0] - - assert persisted_notification.postage == expected_postage - - -def test_persist_notification_with_billable_units_stores_correct_info(mocker): - service = create_service(service_permissions=[LETTER_TYPE]) - template = create_template(service, template_type=LETTER_TYPE) - 
mocker.patch("app.dao.templates_dao.dao_get_template_by_id", return_value=template) - persist_notification( - template_id=template.id, - template_version=template.version, - recipient="123 Main Street", - service=template.service, - personalisation=None, - notification_type=template.template_type, - api_key_id=None, - key_type="normal", - billable_units=3, - template_postage=template.postage, + def test_retry_task_parameters_with_countdown_override(self, notify_api, process_type): + with notify_api.app_context(): + params = CeleryParams.retry(process_type, countdown=-1) + + assert params["queue"] == QueueNames.RETRY + assert params["countdown"] == -1 + + @pytest.mark.parametrize( + ("process_type, expected_retry_period"), + [ + (BULK, CeleryParams.RETRY_PERIODS[BULK]), + (NORMAL, CeleryParams.RETRY_PERIODS[NORMAL]), + (PRIORITY, CeleryParams.RETRY_PERIODS[PRIORITY]), + (None, CeleryParams.RETRY_PERIODS[PRIORITY]), + ], ) - persisted_notification = Notification.query.all()[0] + def test_retry_task_parameters_with_ff_off(self, notify_api, process_type, expected_retry_period): + with notify_api.app_context(), set_config(notify_api, "FF_CELERY_CUSTOM_TASK_PARAMS", False): + params = CeleryParams.retry(process_type) + + assert params["queue"] == QueueNames.RETRY + assert params.get("countdown") is None + + def test_db_save_and_send_notification_throws_exception_when_missing_template(self, sample_api_key, mocker): + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + + notification = Notification( + id=uuid.uuid4(), + template_id=None, + template_version=None, + to="+16502532222", + service=sample_api_key.service, + personalisation=None, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + created_at=datetime.datetime(2016, 11, 11, 16, 8, 18), + ) + + with pytest.raises(SQLAlchemyError): + db_save_and_send_notification(notification) - assert persisted_notification.billable_units == 3 + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + + def test_db_save_and_send_notification_does_not_increment_cache_if_test_key( + self, notify_db, notify_db_session, sample_template, sample_job, mocker + ): + api_key = create_sample_api_key( + notify_db=notify_db, + notify_db_session=notify_db_session, + service=sample_template.service, + key_type="test", + ) + mocker.patch("app.notifications.process_notifications.redis_store.get", return_value="cache") + mocker.patch( + "app.notifications.process_notifications.redis_store.get_all_from_hash", + return_value="cache", + ) + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + daily_limit_cache = mocker.patch("app.notifications.process_notifications.redis_store.incr") + template_usage_cache = mocker.patch("app.notifications.process_notifications.redis_store.increment_hash_value") + + assert Notification.query.count() == 0 + assert NotificationHistory.query.count() == 0 + + notification = Notification( + id=uuid.uuid4(), + template_id=sample_template.id, + template_version=sample_template.version, + service=sample_template.service, + personalisation={}, + notification_type="sms", + api_key_id=api_key.id, + key_type=api_key.key_type, + job_id=sample_job.id, + job_row_number=100, + reference="ref", + reply_to_text=sample_template.service.get_default_sms_sender(), + to="+16502532222", + created_at=datetime.datetime.utcnow(), + ) + 
db_save_and_send_notification(notification) + + assert Notification.query.count() == 1 + + assert not daily_limit_cache.called + assert not template_usage_cache.called + + @freeze_time("2016-01-01 11:09:00.061258") + def test_db_save_and_send_notification_doesnt_touch_cache_for_old_keys_that_dont_exist( + self, sample_template, sample_api_key, mocker + ): + mock_incr = mocker.patch("app.notifications.process_notifications.redis_store.incr") + mocker.patch("app.notifications.process_notifications.redis_store.get", return_value=None) + mocker.patch( + "app.notifications.process_notifications.redis_store.get_all_from_hash", + return_value=None, + ) + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + notification = Notification( + id=uuid.uuid4(), + template_id=sample_template.id, + template_version=sample_template.version, + service=sample_template.service, + personalisation={}, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + reference="ref", + to="+16502532222", + created_at=datetime.datetime.utcnow(), + ) + db_save_and_send_notification(notification) + mock_incr.assert_not_called() + + @freeze_time("2016-01-01 11:09:00.061258") + def test_db_save_and_send_notification_increments_cache_if_key_exists(self, sample_template, sample_api_key, mocker): + mock_incr = mocker.patch("app.notifications.process_notifications.redis_store.incr") + mocker.patch("app.notifications.process_notifications.redis_store.get", return_value=1) + mocker.patch( + "app.notifications.process_notifications.redis_store.get_all_from_hash", + return_value={sample_template.id, 1}, + ) + mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") + + notification = Notification( + id=uuid.uuid4(), + template_id=sample_template.id, + template_version=sample_template.version, + service=sample_template.service, + personalisation={}, + notification_type="sms", + api_key_id=sample_api_key.id, + key_type=sample_api_key.key_type, + reference="ref2", + to="+16502532222", + created_at=datetime.datetime.utcnow(), + ) + db_save_and_send_notification(notification) + + mock_incr.assert_called_once_with( + str(sample_template.service_id) + "-2016-01-01-count", + ) diff --git a/tests/app/notifications/test_rest.py b/tests/app/notifications/test_rest.py index 21398850c8..8374b8558a 100644 --- a/tests/app/notifications/test_rest.py +++ b/tests/app/notifications/test_rest.py @@ -10,7 +10,7 @@ from app.dao.templates_dao import dao_update_template from app.models import KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, ApiKey from tests import create_authorization_header -from tests.app.conftest import sample_notification as create_sample_notification +from tests.app.conftest import create_sample_notification from tests.app.db import create_api_key, create_notification, save_notification @@ -68,7 +68,7 @@ def test_get_notifications_empty_result(client, sample_api_key): notification = json.loads(response.get_data(as_text=True)) assert notification["result"] == "error" - assert notification["message"] == "No result found" + assert notification["message"] == "Notification not found in database" assert response.status_code == 404 @@ -349,7 +349,6 @@ def test_valid_page_size_param(notify_api, notify_db, notify_db_session, sample_ def test_invalid_page_size_param(client, notify_db, notify_db_session, sample_email_template): - create_sample_notification(notify_db, notify_db_session) create_sample_notification(notify_db, notify_db_session) auth_header = 
create_authorization_header(service_id=sample_email_template.service_id) @@ -481,7 +480,6 @@ def test_filter_by_status_and_template_type(client, sample_template, sample_emai def test_get_notification_by_id_returns_merged_template_content(client, sample_template_with_placeholders): - sample_notification = save_notification( create_notification(sample_template_with_placeholders, personalisation={"name": "world"}) ) @@ -545,7 +543,6 @@ def test_get_notifications_for_service_returns_merged_template_content( def test_get_notification_selects_correct_template_for_personalisation(client, notify_db, notify_db_session, sample_template): - create_sample_notification( notify_db, notify_db_session, diff --git a/tests/app/notifications/test_validators.py b/tests/app/notifications/test_validators.py index c33f9e41df..f49c4dc6e0 100644 --- a/tests/app/notifications/test_validators.py +++ b/tests/app/notifications/test_validators.py @@ -7,26 +7,46 @@ import app from app.dbsetup import RoutingSQLAlchemy -from app.models import EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, LETTER_TYPE, SMS_TYPE +from app.models import ( + EMAIL_TYPE, + INTERNATIONAL_SMS_TYPE, + KEY_TYPE_TEAM, + LETTER_TYPE, + SMS_TYPE, + ApiKeyType, +) from app.notifications.validators import ( + check_email_daily_limit, check_reply_to, check_service_email_reply_to_id, check_service_letter_contact_id, - check_service_over_api_rate_limit, + check_service_over_api_rate_limit_and_update_rate, check_service_over_daily_message_limit, check_service_sms_sender_id, check_sms_content_char_count, + check_sms_daily_limit, check_template_is_active, check_template_is_for_notification_type, + increment_email_daily_count_send_warnings_if_needed, + increment_sms_daily_count_send_warnings_if_needed, service_can_send_to_recipient, validate_and_format_recipient, ) from app.utils import get_document_url -from app.v2.errors import BadRequestError, RateLimitError, TooManyRequestsError -from tests.app.conftest import sample_api_key -from tests.app.conftest import sample_notification as create_notification -from tests.app.conftest import sample_service as create_service -from tests.app.conftest import sample_service_safelist +from app.v2.errors import ( + BadRequestError, + RateLimitError, + TooManyEmailRequestsError, + TooManyRequestsError, + TooManySMSRequestsError, +) +from tests.app.conftest import ( + create_sample_api_key, + create_sample_notification, + create_sample_service, + create_sample_service_safelist, + create_sample_template, +) from tests.app.db import ( create_letter_contact, create_reply_to_email, @@ -42,202 +62,296 @@ def enable_redis(notify_api): yield -@pytest.mark.parametrize("key_type", ["test", "team", "normal"]) -def test_check_service_message_limit_in_cache_with_unrestricted_service_is_allowed(key_type, sample_service, mocker): - mocker.patch("app.notifications.validators.redis_store.get", return_value=1) - mocker.patch("app.notifications.validators.redis_store.set") - mocker.patch("app.notifications.validators.services_dao") +def count_key(limit_type, service_id): + if limit_type == "sms": + return f"sms-{service_id}-2016-01-01-count" + elif limit_type == "email": + return f"email-{service_id}-2016-01-01-count" + else: + return f"{service_id}-2016-01-01-count" - check_service_over_daily_message_limit(key_type, sample_service) - app.notifications.validators.redis_store.set.assert_not_called() - assert not app.notifications.validators.services_dao.mock_calls +def near_key(limit_type, service_id): + if limit_type == "sms": + return 
f"nearing-daily-limit-sms-{service_id}-2016-01-01-count" + elif limit_type == "email": + return f"nearing-daily-email-limit-email-{service_id}-2016-01-01-count" + else: + return f"nearing-{service_id}-2016-01-01-count" -@pytest.mark.parametrize("key_type", ["test", "team", "normal"]) -def test_check_service_message_limit_in_cache_under_message_limit_passes(key_type, sample_service, mocker): - mocker.patch("app.notifications.validators.redis_store.get", return_value=1) - mocker.patch("app.notifications.validators.redis_store.set") - mocker.patch("app.notifications.validators.services_dao") - check_service_over_daily_message_limit(key_type, sample_service) - app.notifications.validators.redis_store.set.assert_not_called() - assert not app.notifications.validators.services_dao.mock_calls +def over_key(limit_type, service_id): + if limit_type == "sms": + return f"over-daily-limit-sms-{service_id}-2016-01-01-count" + elif limit_type == "email": + return f"over-daily-email-limit-email-{service_id}-2016-01-01-count" + else: + return f"over-{service_id}-2016-01-01-count" -def test_should_not_interact_with_cache_for_test_key(sample_service, mocker): - mocker.patch("app.notifications.validators.redis_store") - check_service_over_daily_message_limit("test", sample_service) - assert not app.notifications.validators.redis_store.mock_calls - -@pytest.mark.parametrize("key_type", ["team", "normal"]) -def test_should_set_cache_value_as_value_from_database_if_cache_not_set( - key_type, notify_db, notify_db_session, sample_service, mocker -): - with freeze_time("2016-01-01 12:00:00.000000"): - for x in range(5): - create_notification(notify_db, notify_db_session, service=sample_service) - mocker.patch("app.notifications.validators.redis_store.get", return_value=None) +class TestCheckDailySMSEmailLimits: + @pytest.mark.parametrize( + "limit_type", + ["email", "sms"], + ) + def test_check_service_message_limit_in_cache_with_unrestricted_service_is_allowed( + self, notify_api, limit_type, sample_service, mocker + ): + mocker.patch("app.notifications.validators.redis_store.get", return_value=1) mocker.patch("app.notifications.validators.redis_store.set") - check_service_over_daily_message_limit(key_type, sample_service) - app.notifications.validators.redis_store.set.assert_called_with(str(sample_service.id) + "-2016-01-01-count", 5, ex=3600) - - -def test_should_not_access_database_if_redis_disabled(notify_api, sample_service, mocker): - with set_config(notify_api, "REDIS_ENABLED", False): - db_mock = mocker.patch("app.notifications.validators.services_dao") - - check_service_over_daily_message_limit("normal", sample_service) - - assert db_mock.method_calls == [] - - -@pytest.mark.parametrize("key_type", ["team", "normal"]) -def test_check_service_message_limit_over_message_limit_fails(key_type, notify_db, notify_db_session, mocker): - with freeze_time("2016-01-01 12:00:00.000000"): - redis_get = mocker.patch("app.redis_store.get", side_effect=["5", True, None]) - redis_set = mocker.patch("app.redis_store.set") - send_notification = mocker.patch("app.notifications.validators.send_notification_to_service_users") - - service = create_service(notify_db, notify_db_session, restricted=True, limit=4) - for x in range(5): - create_notification(notify_db, notify_db_session, service=service) - with pytest.raises(TooManyRequestsError) as e: - check_service_over_daily_message_limit(key_type, service) - assert e.value.status_code == 429 - assert e.value.message == "Exceeded send limits (4) for today" - assert 
e.value.fields == [] - assert redis_get.call_args_list == [ - call(f"{service.id}-2016-01-01-count"), - call(f"nearing-{service.id}-2016-01-01-count"), - call(f"over-{service.id}-2016-01-01-count"), - ] - assert redis_set.call_args_list == [call(f"over-{service.id}-2016-01-01-count", "2016-01-01T12:00:00", ex=86400)] - send_notification.assert_called_once_with( - service_id=service.id, - template_id=current_app.config["REACHED_DAILY_LIMIT_TEMPLATE_ID"], - personalisation={ - "service_name": service.name, - "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact", - "message_limit_en": "4", - "message_limit_fr": "4", - }, - include_user_fields=["name"], - ) - - -def test_check_service_message_limit_records_nearing_daily_limit(notify_db, notify_db_session, mocker): - with freeze_time("2016-01-01 12:00:00.000000"): - redis_get = mocker.patch("app.redis_store.get", side_effect=[4, None]) - redis_set = mocker.patch("app.redis_store.set") - send_notification = mocker.patch("app.notifications.validators.send_notification_to_service_users") - - service = create_service(notify_db, notify_db_session, restricted=True, limit=5) - for x in range(4): - create_notification(notify_db, notify_db_session, service=service) - - check_service_over_daily_message_limit("normal", service) + mocker.patch("app.notifications.validators.services_dao") + if limit_type == "sms": + check_sms_daily_limit(sample_service) + else: + check_email_daily_limit(sample_service) + app.notifications.validators.redis_store.set.assert_not_called() + assert not app.notifications.validators.services_dao.mock_calls - assert redis_get.call_args_list == [ - call(f"{service.id}-2016-01-01-count"), - call(f"nearing-{service.id}-2016-01-01-count"), - ] - assert redis_set.call_args_list == [ - call( - f"nearing-{service.id}-2016-01-01-count", - "2016-01-01T12:00:00", - ex=86400, - ), - ] - send_notification.assert_called_once_with( - service_id=service.id, - template_id=current_app.config["NEAR_DAILY_LIMIT_TEMPLATE_ID"], - personalisation={ - "service_name": service.name, - "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact", - "message_limit_en": "5", - "message_limit_fr": "5", - }, - include_user_fields=["name"], - ) + @pytest.mark.parametrize( + "limit_type", + ["email", "sms"], + ) + def test_check_service_message_limit_in_cache_under_message_limit_passes( + self, notify_api, limit_type, sample_service, mocker + ): + mocker.patch("app.notifications.validators.redis_store.get", return_value=1) + mocker.patch("app.notifications.validators.redis_store.set") + mocker.patch("app.notifications.validators.services_dao") + if limit_type == "sms": + check_sms_daily_limit(sample_service) + else: + check_email_daily_limit(sample_service) + app.notifications.validators.redis_store.set.assert_not_called() + assert not app.notifications.validators.services_dao.mock_calls + def test_should_not_interact_with_cache_for_test_key(self, notify_api, sample_service, mocker): + mocker.patch("app.notifications.validators.redis_store") + check_service_over_daily_message_limit("test", sample_service) + assert not app.notifications.validators.redis_store.mock_calls -def test_check_service_message_limit_does_not_send_notifications_if_already_did(notify_db, notify_db_session, mocker): - with freeze_time("2016-01-01 12:00:00.000000"): - redis_get = mocker.patch("app.redis_store.get", side_effect=[5, True, True]) - redis_set = mocker.patch("app.redis_store.set") - send_notification = 
mocker.patch("app.notifications.validators.send_notification_to_service_users") + @pytest.mark.parametrize( + "key_type", + ["team", "normal"], + ) + def test_should_set_cache_value_as_value_from_database_if_cache_not_set( + self, notify_api, key_type, notify_db, notify_db_session, sample_service, mocker + ): + with freeze_time("2016-01-01 12:00:00.000000"): + for x in range(5): + create_sample_notification(notify_db, notify_db_session, service=sample_service, billable_units=2) + mocker.patch("app.notifications.validators.redis_store.get", return_value=None) + mocker.patch("app.notifications.validators.redis_store.set") + + check_service_over_daily_message_limit(key_type, sample_service) + + app.notifications.validators.redis_store.set.assert_called_with(count_key("all", sample_service.id), 5, ex=7200) + + def test_should_not_access_database_if_redis_disabled(self, notify_api, sample_service, mocker): + with set_config(notify_api, "REDIS_ENABLED", False): + db_mock = mocker.patch("app.notifications.validators.services_dao") + check_service_over_daily_message_limit("normal", sample_service) + check_sms_daily_limit(sample_service) + + assert db_mock.method_calls == [] + + @pytest.mark.parametrize( + "key_type, email_template", + [ + ("team", "REACHED_DAILY_LIMIT_TEMPLATE_ID"), + ("normal", "REACHED_DAILY_LIMIT_TEMPLATE_ID"), + ], + ) + def test_check_service_message_limit_over_message_limit_fails( + self, notify_api, key_type, email_template, notify_db, notify_db_session, mocker + ): + with freeze_time("2016-01-01 12:00:00.000000"): + redis_get = mocker.patch("app.redis_store.get", side_effect=["5", True, None]) + redis_set = mocker.patch("app.redis_store.set") + send_notification = mocker.patch("app.notifications.validators.send_notification_to_service_users") + service = create_sample_service(notify_db, notify_db_session, restricted=True, limit=4) + for x in range(5): + create_sample_notification(notify_db, notify_db_session, service=service) + + with pytest.raises(TooManyRequestsError) as e: + check_service_over_daily_message_limit(key_type, service) + assert e.value.message == "Exceeded send limits (4) for today" + assert e.value.status_code == 429 + assert e.value.fields == [] + + assert redis_get.call_args_list == [ + call(count_key("all", service.id)), + call(near_key("all", service.id)), + call(over_key("all", service.id)), + ] + + assert redis_set.call_args_list == [call(over_key("all", service.id), "2016-01-01T12:00:00", ex=86400)] + + send_notification.assert_called_once_with( + service_id=service.id, + template_id=current_app.config[email_template], + personalisation={ + "service_name": service.name, + "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact", + "message_limit_en": "4", + "message_limit_fr": "4", + }, + include_user_fields=["name"], + ) + + @pytest.mark.parametrize( + "limit_type, template_name", + [("email", "NEAR_DAILY_EMAIL_LIMIT_TEMPLATE_ID"), ("sms", "NEAR_DAILY_SMS_LIMIT_TEMPLATE_ID")], + ) + def test_check_service_message_limit_records_nearing_daily_limit( + self, notify_api, limit_type, template_name, notify_db, notify_db_session, mocker + ): + with freeze_time("2016-01-01 12:00:00.000000"): + redis_get = mocker.patch("app.redis_store.get", side_effect=[4, 4, None]) + send_notification = mocker.patch("app.notifications.validators.send_notification_to_service_users") + + service = create_sample_service(notify_db, notify_db_session, restricted=True, limit=5, sms_limit=5) + template = create_sample_template(notify_db, notify_db_session, 
service=service, template_type=limit_type) + for x in range(5): + create_sample_notification(notify_db, notify_db_session, service=service, template=template) + + if limit_type == "sms": + increment_sms_daily_count_send_warnings_if_needed(service) + else: + increment_email_daily_count_send_warnings_if_needed(service) + + assert redis_get.call_args_list == [ + call(count_key(limit_type, service.id)), + call(count_key(limit_type, service.id)), + call(near_key(limit_type, service.id)), + ] + kwargs = {"limit_reset_time_et_12hr": "7PM", "limit_reset_time_et_24hr": "19"} + send_notification.assert_called_once_with( + service_id=service.id, + template_id=current_app.config[template_name], + personalisation={ + "service_name": service.name, + "contact_url": f"{current_app.config['ADMIN_BASE_URL']}/contact", + "count_en": "4", + "count_fr": "4", + "remaining_en": "1", + "remaining_fr": "1", + "message_limit_en": "5", + "message_limit_fr": "5", + **kwargs, + }, + include_user_fields=["name"], + ) + + def test_check_service_message_limit_does_not_send_notifications_if_already_did( + self, notify_api, notify_db, notify_db_session, mocker + ): + with freeze_time("2016-01-01 12:00:00.000000"): + redis_get = mocker.patch("app.redis_store.get", side_effect=[5, True, True]) + redis_set = mocker.patch("app.redis_store.set") + send_notification = mocker.patch("app.notifications.validators.send_notification_to_service_users") + + service = create_sample_service(notify_db, notify_db_session, restricted=True, limit=5, sms_limit=5) + + with pytest.raises(TooManyRequestsError) as e: + check_service_over_daily_message_limit("normal", service) + assert e.value.message == "Exceeded send limits (5) for today" + assert e.value.status_code == 429 + assert e.value.fields == [] + + assert redis_get.call_args_list == [ + call(count_key("all", service.id)), + call(near_key("all", service.id)), + call(over_key("all", service.id)), + ] + redis_set.assert_not_called() + send_notification.assert_not_called() + + @pytest.mark.parametrize("key_type", ["team", "normal"]) + def test_check_service_message_limit_in_cache_over_message_limit_fails( + self, notify_api, notify_db, notify_db_session, key_type, mocker + ): + with freeze_time("2016-01-01 12:00:00.000000"): + mocker.patch("app.redis_store.get", return_value=5) + mocker.patch("app.notifications.validators.redis_store.set") + mocker.patch("app.notifications.validators.services_dao") + + service = create_sample_service(notify_db, notify_db_session, restricted=True, limit=4, sms_limit=4) + with pytest.raises(TooManyRequestsError) as e: + check_service_over_daily_message_limit(key_type, service) + assert e.value.status_code == 429 + assert e.value.message == "Exceeded send limits (4) for today" + assert e.value.fields == [] + + with pytest.raises(TooManySMSRequestsError) as e: + check_sms_daily_limit(service) + assert e.value.status_code == 429 + assert e.value.message == "Exceeded SMS daily sending limit of 4 fragments" + assert e.value.fields == [] + + with pytest.raises(TooManyEmailRequestsError) as e: + check_email_daily_limit(service) + assert e.value.status_code == 429 + assert e.value.message == "Exceeded email daily sending limit of 4 messages" + assert e.value.fields == [] + + app.notifications.validators.redis_store.set.assert_not_called() + assert not app.notifications.validators.services_dao.mock_calls + + @pytest.mark.parametrize( + "is_trial_service, expected_counter", + [ + (True, "validators.rate_limit.trial_service_daily"), + (False, 
"validators.rate_limit.live_service_daily"), + ], + ids=["trial service", "live service"], + ) + def test_check_service_message_limit_sends_statsd_over_message_limit_fails( + self, + notify_api, + app_statsd, + notify_db, + notify_db_session, + mocker, + is_trial_service, + expected_counter, + ): + mocker.patch("app.redis_store.get", return_value=5) + mocker.patch("app.notifications.validators.redis_store.set") - service = create_service(notify_db, notify_db_session, restricted=True, limit=5) + service = create_sample_service(notify_db, notify_db_session, restricted=is_trial_service, limit=4, sms_limit=4) - with pytest.raises(TooManyRequestsError) as e: + with pytest.raises(TooManyRequestsError): check_service_over_daily_message_limit("normal", service) - assert e.value.status_code == 429 - assert e.value.message == "Exceeded send limits (5) for today" - assert e.value.fields == [] - assert redis_get.call_args_list == [ - call(f"{service.id}-2016-01-01-count"), - call(f"nearing-{service.id}-2016-01-01-count"), - call(f"over-{service.id}-2016-01-01-count"), - ] - redis_set.assert_not_called() - send_notification.assert_not_called() + app_statsd.statsd_client.incr.assert_called_once_with(expected_counter) - -@pytest.mark.parametrize("key_type", ["team", "normal"]) -def test_check_service_message_limit_in_cache_over_message_limit_fails(notify_db, notify_db_session, key_type, mocker): - with freeze_time("2016-01-01 12:00:00.000000"): - mocker.patch("app.redis_store.get", return_value=5) + def test_check_service_message_limit_skip_statsd_over_message_no_limit_fails_sms( + self, notify_api, app_statsd, notify_db, notify_db_session, mocker + ): + # Given + mocker.patch("app.redis_store.get", return_value=0) mocker.patch("app.notifications.validators.redis_store.set") - mocker.patch("app.notifications.validators.services_dao") - - service = create_service(notify_db, notify_db_session, restricted=True, limit=4) - with pytest.raises(TooManyRequestsError) as e: - check_service_over_daily_message_limit(key_type, service) - assert e.value.status_code == 429 - assert e.value.message == "Exceeded send limits (4) for today" - assert e.value.fields == [] - app.notifications.validators.redis_store.set.assert_not_called() - assert not app.notifications.validators.services_dao.mock_calls - - -@pytest.mark.parametrize( - "is_trial_service, expected_counter", - [ - (True, "validators.rate_limit.trial_service_daily"), - (False, "validators.rate_limit.live_service_daily"), - ], - ids=["trial service", "live service"], -) -def test_check_service_message_limit_sends_statsd_over_message_limit_fails( - app_statsd, - notify_db, - notify_db_session, - mocker, - is_trial_service, - expected_counter, -): - mocker.patch("app.redis_store.get", return_value=5) - mocker.patch("app.notifications.validators.redis_store.set") - service = create_service(notify_db, notify_db_session, restricted=is_trial_service, limit=4) - with pytest.raises(TooManyRequestsError): + # When + service = create_sample_service(notify_db, notify_db_session, restricted=True, limit=4, sms_limit=4) check_service_over_daily_message_limit("normal", service) + check_sms_daily_limit(service) + # Then + app_statsd.statsd_client.incr.assert_not_called() + + def test_check_service_message_limit_skip_statsd_over_message_no_limit_fails_emails( + self, notify_api, app_statsd, notify_db, notify_db_session, mocker + ): + # Given + mocker.patch("app.redis_store.get", return_value=0) + mocker.patch("app.notifications.validators.redis_store.set") - 
app_statsd.statsd_client.incr.assert_called_once_with(expected_counter) - - -def test_check_service_message_limit_skip_statsd_over_message_no_limit_fails(app_statsd, notify_db, notify_db_session, mocker): - # Given - mocker.patch("app.redis_store.get", return_value=0) - mocker.patch("app.notifications.validators.redis_store.set") - - # When - service = create_service(notify_db, notify_db_session, restricted=True, limit=4) - check_service_over_daily_message_limit("normal", service) + # When + service = create_sample_service(notify_db, notify_db_session, restricted=True, limit=4, sms_limit=4) + check_email_daily_limit(service) - # Then - app_statsd.statsd_client.incr.assert_not_called() + # Then + app_statsd.statsd_client.incr.assert_not_called() @pytest.mark.parametrize("template_type, notification_type", [(EMAIL_TYPE, EMAIL_TYPE), (SMS_TYPE, SMS_TYPE)]) @@ -275,25 +389,31 @@ def test_check_template_is_active_fails(sample_template): @pytest.mark.parametrize("key_type", ["test", "normal"]) def test_service_can_send_to_recipient_passes(key_type, notify_db, notify_db_session): - trial_mode_service = create_service(notify_db, notify_db_session, service_name="trial mode", restricted=True) + trial_mode_service = create_sample_service(notify_db, notify_db_session, service_name="trial mode", restricted=True) assert service_can_send_to_recipient(trial_mode_service.users[0].email_address, key_type, trial_mode_service) is None assert service_can_send_to_recipient(trial_mode_service.users[0].mobile_number, key_type, trial_mode_service) is None @pytest.mark.parametrize("key_type", ["test", "normal"]) def test_service_can_send_to_recipient_passes_for_live_service_non_team_member(key_type, notify_db, notify_db_session): - live_service = create_service(notify_db, notify_db_session, service_name="live", restricted=False) + live_service = create_sample_service(notify_db, notify_db_session, service_name="live", restricted=False) assert service_can_send_to_recipient("some_other_email@test.com", key_type, live_service) is None assert service_can_send_to_recipient("07513332413", key_type, live_service) is None def test_service_can_send_to_recipient_passes_for_safelisted_recipient_passes(notify_db, notify_db_session, sample_service): - sample_service_safelist(notify_db, notify_db_session, email_address="some_other_email@test.com") + create_sample_service_safelist(notify_db, notify_db_session, email_address="some_other_email@test.com") assert service_can_send_to_recipient("some_other_email@test.com", "team", sample_service) is None - sample_service_safelist(notify_db, notify_db_session, mobile_number="6502532222") + create_sample_service_safelist(notify_db, notify_db_session, mobile_number="6502532222") assert service_can_send_to_recipient("6502532222", "team", sample_service) is None +def test_service_can_send_to_recipient_passes_for_simulated_recipients(notify_db, notify_db_session): + live_service = create_sample_service(notify_db, notify_db_session, service_name="live", restricted=False) + assert service_can_send_to_recipient(current_app.config["SIMULATED_EMAIL_ADDRESSES"][0], KEY_TYPE_TEAM, live_service) is None + assert service_can_send_to_recipient(current_app.config["SIMULATED_SMS_NUMBERS"][0], KEY_TYPE_TEAM, live_service) is None + + @pytest.mark.parametrize( "recipient", [ @@ -307,7 +427,7 @@ def test_service_can_send_to_recipient_fails_when_ignoring_safelist( sample_service, recipient, ): - sample_service_safelist(notify_db, notify_db_session, **recipient) + create_sample_service_safelist(notify_db, 
notify_db_session, **recipient) with pytest.raises(BadRequestError) as exec_info: service_can_send_to_recipient( next(iter(recipient.values())), @@ -317,7 +437,7 @@ def test_service_can_send_to_recipient_fails_when_ignoring_safelist( ) assert exec_info.value.status_code == 400 assert ( - exec_info.value.message == "Can’t send to this recipient using a team-only API key " + exec_info.value.message == f"Can’t send to this recipient using a team-only API key (service {sample_service.id}) " f'- see {get_document_url("en", "keys.html#team-and-safelist")}' ) assert exec_info.value.fields == [] @@ -327,18 +447,18 @@ def test_service_can_send_to_recipient_fails_when_ignoring_safelist( @pytest.mark.parametrize( "key_type, error_message", [ - ("team", "Can’t send to this recipient using a team-only API key - see"), + ("team", "Can’t send to this recipient using a team-only API key"), ("normal", "Can’t send to this recipient when service is in trial mode – see "), ], ) # noqa def test_service_can_send_to_recipient_fails_when_recipient_is_not_on_team( recipient: str, - key_type: str, + key_type: ApiKeyType, error_message: str, notify_db: RoutingSQLAlchemy, notify_db_session: RoutingSQLAlchemy, ): - trial_mode_service = create_service(notify_db, notify_db_session, service_name="trial mode", restricted=True) + trial_mode_service = create_sample_service(notify_db, notify_db_session, service_name="trial mode", restricted=True) with pytest.raises(BadRequestError) as exec_info: service_can_send_to_recipient(recipient, key_type, trial_mode_service) assert exec_info.value.status_code == 400 @@ -347,12 +467,12 @@ def test_service_can_send_to_recipient_fails_when_recipient_is_not_on_team( def test_service_can_send_to_recipient_fails_when_mobile_number_is_not_on_team(notify_db, notify_db_session): - live_service = create_service(notify_db, notify_db_session, service_name="live mode", restricted=False) + live_service = create_sample_service(notify_db, notify_db_session, service_name="live mode", restricted=False) with pytest.raises(BadRequestError) as e: service_can_send_to_recipient("0758964221", "team", live_service) assert e.value.status_code == 400 assert ( - e.value.message == "Can’t send to this recipient using a team-only API key " + e.value.message == f"Can’t send to this recipient using a team-only API key (service {live_service.id}) " f'- see {get_document_url("en", "keys.html#team-and-safelist")}' ) assert e.value.fields == [] @@ -360,13 +480,29 @@ def test_service_can_send_to_recipient_fails_when_mobile_number_is_not_on_team(n @pytest.mark.parametrize("char_count", [612, 0, 494, 200]) def test_check_sms_content_char_count_passes(char_count, notify_api): - assert check_sms_content_char_count(char_count) is None + assert check_sms_content_char_count(char_count, "", False) is None @pytest.mark.parametrize("char_count", [613, 700, 6000]) def test_check_sms_content_char_count_fails(char_count, notify_api): with pytest.raises(BadRequestError) as e: - check_sms_content_char_count(char_count) + check_sms_content_char_count(char_count, "", False) + assert e.value.status_code == 400 + assert e.value.message == "Content for template has a character count greater than the limit of {}".format( + SMS_CHAR_COUNT_LIMIT + ) + assert e.value.fields == [] + + +@pytest.mark.parametrize("char_count", [603, 0, 494, 200]) +def test_check_sms_content_char_count_passes_with_svc_name(char_count, notify_api): + assert check_sms_content_char_count(char_count, "service", True) is None + + +@pytest.mark.parametrize("char_count", 
[606, 700, 6000]) +def test_check_sms_content_char_count_fails_with_svc_name(char_count, notify_api): + with pytest.raises(BadRequestError) as e: + check_sms_content_char_count(char_count, "service", True) assert e.value.status_code == 400 assert e.value.message == "Content for template has a character count greater than the limit of {}".format( SMS_CHAR_COUNT_LIMIT @@ -377,7 +513,6 @@ def test_check_sms_content_char_count_fails(char_count, notify_api): @pytest.mark.parametrize("key_type", ["team", "live", "test"]) def test_that_when_exceed_rate_limit_request_fails(notify_db, notify_db_session, key_type, mocker): with freeze_time("2016-01-01 12:00:00.000000"): - if key_type == "live": api_key_type = "normal" else: @@ -386,10 +521,10 @@ def test_that_when_exceed_rate_limit_request_fails(notify_db, notify_db_session, mocker.patch("app.redis_store.exceeded_rate_limit", return_value=True) mocker.patch("app.notifications.validators.services_dao") - service = create_service(notify_db, notify_db_session, restricted=True) - api_key = sample_api_key(notify_db, notify_db_session, service=service, key_type=api_key_type) + service = create_sample_service(notify_db, notify_db_session, restricted=True) + api_key = create_sample_api_key(notify_db, notify_db_session, service=service, key_type=api_key_type) with pytest.raises(RateLimitError) as e: - check_service_over_api_rate_limit(service, api_key) + check_service_over_api_rate_limit_and_update_rate(service, api_key) assert app.redis_store.exceeded_rate_limit.called_with( "{}-{}".format(str(service.id), api_key.key_type), service.rate_limit, 60 @@ -406,10 +541,10 @@ def test_that_when_not_exceeded_rate_limit_request_succeeds(notify_db, notify_db mocker.patch("app.redis_store.exceeded_rate_limit", return_value=False) mocker.patch("app.notifications.validators.services_dao") - service = create_service(notify_db, notify_db_session, restricted=True) - api_key = sample_api_key(notify_db, notify_db_session, service=service, key_type="normal") + service = create_sample_service(notify_db, notify_db_session, restricted=True) + api_key = create_sample_api_key(notify_db, notify_db_session, service=service, key_type="normal") - check_service_over_api_rate_limit(service, api_key) + check_service_over_api_rate_limit_and_update_rate(service, api_key) assert app.redis_store.exceeded_rate_limit.called_with("{}-{}".format(str(service.id), api_key.key_type), 3000, 60) @@ -420,10 +555,10 @@ def test_should_not_rate_limit_if_limiting_is_disabled(notify_db, notify_db_sess mocker.patch("app.redis_store.exceeded_rate_limit", return_value=False) mocker.patch("app.notifications.validators.services_dao") - service = create_service(notify_db, notify_db_session, restricted=True) - api_key = sample_api_key(notify_db, notify_db_session, service=service) + service = create_sample_service(notify_db, notify_db_session, restricted=True) + api_key = create_sample_api_key(notify_db, notify_db_session, service=service) - check_service_over_api_rate_limit(service, api_key) + check_service_over_api_rate_limit_and_update_rate(service, api_key) assert not app.redis_store.exceeded_rate_limit.called @@ -433,7 +568,7 @@ def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_ notify_db, notify_db_session, ): - service = create_service(notify_db, notify_db_session, permissions=[SMS_TYPE]) + service = create_sample_service(notify_db, notify_db_session, permissions=[SMS_TYPE]) with pytest.raises(BadRequestError) as e: validate_and_format_recipient("+20-12-1234-1234", key_type, 
service, SMS_TYPE) assert e.value.status_code == 400 @@ -443,7 +578,7 @@ def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_ @pytest.mark.parametrize("key_type", ["test", "normal"]) def test_allows_api_calls_with_international_numbers_if_service_does_allow_int_sms(key_type, notify_db, notify_db_session): - service = create_service(notify_db, notify_db_session, permissions=[SMS_TYPE, INTERNATIONAL_SMS_TYPE]) + service = create_sample_service(notify_db, notify_db_session, permissions=[SMS_TYPE, INTERNATIONAL_SMS_TYPE]) result = validate_and_format_recipient("+20-12-1234-1234", key_type, service, SMS_TYPE) assert result == "+201212341234" @@ -509,10 +644,12 @@ def test_check_service_sms_sender_id_where_sms_sender_is_not_found(sample_servic assert e.value.message == "sms_sender_id {} does not exist in database for service id {}".format(fake_uuid, sample_service.id) +@pytest.mark.skip(reason="Letter tests") def test_check_service_letter_contact_id_where_letter_contact_id_is_none(): assert check_service_letter_contact_id(None, None, "letter") is None +@pytest.mark.skip(reason="Letter tests") def test_check_service_letter_contact_id_where_letter_contact_id_is_found( sample_service, ): @@ -520,6 +657,7 @@ def test_check_service_letter_contact_id_where_letter_contact_id_is_found( assert check_service_letter_contact_id(sample_service.id, letter_contact.id, LETTER_TYPE) == "123456" +@pytest.mark.skip(reason="Letter tests") def test_check_service_letter_contact_id_where_service_id_is_not_found(sample_service, fake_uuid): letter_contact = create_letter_contact(service=sample_service, contact_block="123456") with pytest.raises(BadRequestError) as e: @@ -530,6 +668,7 @@ def test_check_service_letter_contact_id_where_service_id_is_not_found(sample_se ) +@pytest.mark.skip(reason="Letter tests") def test_check_service_letter_contact_id_where_letter_contact_is_not_found(sample_service, fake_uuid): with pytest.raises(BadRequestError) as e: check_service_letter_contact_id(sample_service.id, fake_uuid, LETTER_TYPE) diff --git a/tests/app/organisation/test_rest.py b/tests/app/organisation/test_rest.py index 17335ed45a..db55b7b07b 100644 --- a/tests/app/organisation/test_rest.py +++ b/tests/app/organisation/test_rest.py @@ -6,6 +6,9 @@ dao_add_service_to_organisation, dao_add_user_to_organisation, ) +from app.dao.service_data_retention_dao import ( + fetch_service_data_retention_by_notification_type, +) from app.models import Organisation from tests.app.db import ( create_domain, @@ -94,7 +97,6 @@ def test_get_organisation_by_id(admin_request, notify_db_session): def test_get_organisation_by_id_returns_domains(admin_request, notify_db_session): - org = create_organisation( domains=[ "foo.gov.uk", @@ -230,7 +232,7 @@ def test_post_create_organisation_existing_name_raises_400(admin_request, sample }, ( "organisation_type foo is not one of " - "[central, local, nhs_central, nhs_local, nhs_gp, emergency_service, school_or_college, other]" + "[central, province_or_territory, local, nhs_central, nhs_local, nhs_gp, emergency_service, school_or_college, other]" ), ), ), @@ -335,7 +337,6 @@ def test_update_organisation_default_branding( admin_request, notify_db_session, ): - org = create_organisation(name="Test Organisation") email_branding = create_email_branding() @@ -557,6 +558,26 @@ def test_post_link_service_to_organisation_missing_payload(admin_request, sample ) +def test_link_service_to_pt_organisation(admin_request, sample_service, sample_organisation): + data = {"service_id": 
str(sample_service.id)} + sample_organisation.organisation_type = "province_or_territory" + + admin_request.post( + "organisation.link_service_to_organisation", + _data=data, + organisation_id=sample_organisation.id, + _expected_status=204, + ) + + assert len(sample_organisation.services) == 1 + + email_retention = fetch_service_data_retention_by_notification_type(sample_service.id, "email") + sms_retention = fetch_service_data_retention_by_notification_type(sample_service.id, "sms") + + assert email_retention.days_of_retention == 3 + assert sms_retention.days_of_retention == 3 + + def test_rest_get_organisation_services(admin_request, sample_organisation, sample_service): dao_add_service_to_organisation(sample_service, sample_organisation.id) response = admin_request.get( diff --git a/tests/app/service/test_api_key_endpoints.py b/tests/app/service/test_api_key_endpoints.py index cea0c174bb..2bdc7cbe43 100644 --- a/tests/app/service/test_api_key_endpoints.py +++ b/tests/app/service/test_api_key_endpoints.py @@ -1,12 +1,11 @@ import json -from flask import url_for +from flask import current_app, url_for from app.dao.api_key_dao import expire_api_key from app.models import KEY_TYPE_NORMAL, ApiKey from tests import create_authorization_header -from tests.app.conftest import sample_api_key as create_sample_api_key -from tests.app.conftest import sample_service as create_sample_service +from tests.app.conftest import create_sample_api_key, create_sample_service from tests.app.db import create_user @@ -158,3 +157,47 @@ def test_get_api_keys_should_return_one_key_for_service(notify_api, sample_api_k assert response.status_code == 200 json_resp = json.loads(response.get_data(as_text=True)) assert len(json_resp["apiKeys"]) == 1 + + +def test_create_api_key_expected_format_by_admin(notify_api, sample_service): + with notify_api.test_request_context(): + with notify_api.test_client() as client: + assert ApiKey.query.count() == 0 + data = { + "name": "new prefixed key", + "created_by": str(sample_service.created_by.id), + "key_type": KEY_TYPE_NORMAL, + } + auth_header = create_authorization_header() + response = client.post( + url_for("service.create_api_key", service_id=sample_service.id), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + + response_data = json.loads(response.get_data(as_text=True)) + assert "data" in response_data + assert "key" in response_data["data"] + assert "key_name" in response_data["data"] + + +def test_create_api_key_ensure_key_is_prefixed(notify_api, sample_service): + with notify_api.test_request_context(): + with notify_api.test_client() as client: + assert ApiKey.query.count() == 0 + data = { + "name": "new prefixed key", + "created_by": str(sample_service.created_by.id), + "key_type": KEY_TYPE_NORMAL, + } + auth_header = create_authorization_header() + response = client.post( + url_for("service.create_api_key", service_id=sample_service.id), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + + response_data = json.loads(response.get_data(as_text=True)) + assert current_app.config["API_KEY_PREFIX"] in response_data["data"]["key_name"] diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index 4b37247bc8..4c2069a6e2 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -11,9 +11,12 @@ from notifications_utils.clients.redis import ( daily_limit_cache_key, 
near_daily_limit_cache_key, + near_email_daily_limit_cache_key, over_daily_limit_cache_key, + over_email_daily_limit_cache_key, ) +from app.clients.salesforce.salesforce_engagement import ENGAGEMENT_STAGE_LIVE from app.dao.organisation_dao import dao_add_service_to_organisation from app.dao.service_sms_sender_dao import dao_get_sms_senders_by_service_id from app.dao.service_user_dao import dao_get_service_user @@ -35,16 +38,15 @@ Notification, Service, ServiceEmailReplyTo, - ServiceLetterContact, ServicePermission, ServiceSmsSender, User, ) from tests import create_authorization_header -from tests.app.conftest import sample_notification as create_sample_notification -from tests.app.conftest import sample_notification_with_job from tests.app.conftest import ( - sample_user_service_permission as create_user_service_permission, + create_sample_notification, + create_sample_notification_with_job, + create_sample_user_service_permission, ) from tests.app.db import ( create_annual_billing, @@ -54,7 +56,6 @@ create_ft_notification_status, create_inbound_number, create_letter_branding, - create_letter_contact, create_notification, create_organisation, create_reply_to_email, @@ -67,6 +68,7 @@ create_user, save_notification, ) +from tests.conftest import set_config def test_get_service_list(client, service_factory): @@ -230,6 +232,40 @@ def test_get_live_services_data(sample_user, admin_request): ] +def test_get_delivered_notification_stats_by_month_data(admin_request, sample_service): + email_template = create_template(service=sample_service, template_type="email", template_name="b") + + create_ft_notification_status( + utc_date=date(2019, 12, 10), + service=sample_service, + template=email_template, + count=3, + ) + + response = admin_request.get("service.get_delivered_notification_stats_by_month_data")["data"] + + assert len(response) == 1 + assert sorted(list(response[0])) == ["count", "month", "notification_type"] + first = response[0] + assert first["month"].startswith("2019-12-01") + assert first["notification_type"] == "email" + assert first["count"] == 3 + + +def test_get_delivered_notification_stats_by_month_data_without_heartbeat(notify_api, admin_request, sample_service): + email_template = create_template(service=sample_service, template_type="email", template_name="b") + + create_ft_notification_status( + utc_date=date(2019, 12, 10), + service=sample_service, + template=email_template, + count=3, + ) + with set_config(notify_api, "NOTIFY_SERVICE_ID", email_template.service_id): + response = admin_request.get("service.get_delivered_notification_stats_by_month_data", filter_heartbeats=True)["data"] + assert len(response) == 0 + + def test_get_service_by_id(admin_request, sample_service): json_resp = admin_request.get("service.get_service_by_id", service_id=sample_service.id) assert json_resp["data"]["name"] == sample_service.name @@ -337,16 +373,17 @@ def test_create_service(admin_request, sample_user, platform_admin, expected_cou "name": "created service", "user_id": str(sample_user.id), "message_limit": 1000, + "sms_daily_limit": 1000, "restricted": False, "active": False, "email_from": "created.service", "created_by": str(sample_user.id), } - zd_send_create_service_mock = mocker.patch("app.user.rest.ZenDeskSell.send_create_service", return_value=True) + mocked_salesforce_client = mocker.patch("app.service.rest.salesforce_client") + json_resp = admin_request.post("service.create_service", _data=data, _expected_status=201) - zd_send_create_service_mock.assert_called() assert 
json_resp["data"]["id"] assert json_resp["data"]["name"] == "created service" assert json_resp["data"]["email_from"] == "created.service" @@ -354,6 +391,7 @@ def test_create_service(admin_request, sample_user, platform_admin, expected_cou assert json_resp["data"]["rate_limit"] == 1000 assert json_resp["data"]["letter_branding"] is None assert json_resp["data"]["count_as_live"] is expected_count_as_live + mocked_salesforce_client.engagement_create.assert_called_once() service_db = Service.query.get(json_resp["data"]["id"]) assert service_db.name == "created service" @@ -385,7 +423,6 @@ def test_create_service(admin_request, sample_user, platform_admin, expected_cou ), ) def test_create_service_with_domain_sets_organisation(admin_request, sample_user, domain, expected_org, mocker): - red_herring_org = create_organisation(name="Sub example") create_domain("specific.example.gov.uk", red_herring_org.id) create_domain("aaaaaaaa.example.gov.uk", red_herring_org.id) @@ -404,24 +441,22 @@ def test_create_service_with_domain_sets_organisation(admin_request, sample_user "name": "created service", "user_id": str(sample_user.id), "message_limit": 1000, + "sms_daily_limit": 1000, "restricted": False, "active": False, "email_from": "created.service", "created_by": str(sample_user.id), "service_domain": domain, } - zd_send_create_service_mock = mocker.patch("app.user.rest.ZenDeskSell.send_create_service", return_value=True) json_resp = admin_request.post("service.create_service", _data=data, _expected_status=201) - zd_send_create_service_mock.assert_called() if expected_org: assert json_resp["data"]["organisation"] == str(org.id) else: assert json_resp["data"]["organisation"] is None -def test_create_service_inherits_branding_from_organisation(admin_request, sample_user, mocker): - +def test_create_service_doesnt_inherit_branding_from_organisation(admin_request, sample_user, mocker): org = create_organisation() email_branding = create_email_branding() org.email_branding = email_branding @@ -430,13 +465,13 @@ def test_create_service_inherits_branding_from_organisation(admin_request, sampl create_domain("example.gov.uk", org.id) sample_user.email_address = "test@example.gov.uk" - zd_send_create_service_mock = mocker.patch("app.user.rest.ZenDeskSell.send_create_service", return_value=True) json_resp = admin_request.post( "service.create_service", _data={ "name": "created service", "user_id": str(sample_user.id), "message_limit": 1000, + "sms_daily_limit": 1000, "restricted": False, "active": False, "email_from": "created.service", @@ -445,9 +480,7 @@ def test_create_service_inherits_branding_from_organisation(admin_request, sampl _expected_status=201, ) - zd_send_create_service_mock.assert_called() - assert json_resp["data"]["email_branding"] == str(email_branding.id) - assert json_resp["data"]["letter_branding"] == str(letter_branding.id) + assert json_resp["data"]["email_branding"] is None def test_should_not_create_service_with_missing_user_id_field(notify_api, fake_uuid): @@ -457,6 +490,7 @@ def test_should_not_create_service_with_missing_user_id_field(notify_api, fake_u "email_from": "service", "name": "created service", "message_limit": 1000, + "sms_daily_limit": 1000, "restricted": False, "active": False, "created_by": str(fake_uuid), @@ -477,6 +511,7 @@ def test_should_error_if_created_by_missing(notify_api, sample_user): "email_from": "service", "name": "created service", "message_limit": 1000, + "sms_daily_limit": 1000, "restricted": False, "active": False, "user_id": str(sample_user.id), @@ -503,6 
+538,7 @@ def test_should_not_create_service_with_missing_if_user_id_is_not_in_database( "user_id": fake_uuid, "name": "created service", "message_limit": 1000, + "sms_daily_limit": 1000, "restricted": False, "active": False, "created_by": str(fake_uuid), @@ -528,6 +564,7 @@ def test_should_not_create_service_if_missing_data(notify_api, sample_user): assert json_resp["result"] == "error" assert "Missing data for required field." in json_resp["message"]["name"] assert "Missing data for required field." in json_resp["message"]["message_limit"] + # assert "Missing data for required field." in json_resp["message"]["sms_daily_limit"] # TODO: put this back in once we add sms_daily_limit to admin assert "Missing data for required field." in json_resp["message"]["restricted"] @@ -538,6 +575,7 @@ def test_should_not_create_service_with_duplicate_name(notify_api, sample_user, "name": sample_service.name, "user_id": str(sample_service.users[0].id), "message_limit": 1000, + "sms_daily_limit": 1000, "restricted": False, "active": False, "email_from": "sample.service2", @@ -560,6 +598,7 @@ def test_create_service_should_throw_duplicate_key_constraint_for_existing_email "name": service_name, "user_id": str(first_service.users[0].id), "message_limit": 1000, + "sms_daily_limit": 1000, "restricted": False, "active": False, "email_from": "first.service", @@ -621,47 +660,6 @@ def test_cant_update_service_org_type_to_random_value(client, sample_service): assert resp.status_code == 500 -def test_update_service_letter_branding(client, notify_db, sample_service): - letter_branding = create_letter_branding(name="test brand", filename="test-brand") - data = {"letter_branding": str(letter_branding.id)} - - auth_header = create_authorization_header() - - resp = client.post( - "/service/{}".format(sample_service.id), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - result = resp.json - assert resp.status_code == 200 - assert result["data"]["letter_branding"] == str(letter_branding.id) - - -def test_update_service_remove_letter_branding(client, notify_db, sample_service): - letter_branding = create_letter_branding(name="test brand", filename="test-brand") - sample_service - data = {"letter_branding": str(letter_branding.id)} - - auth_header = create_authorization_header() - - client.post( - "/service/{}".format(sample_service.id), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - - data = {"letter_branding": None} - resp = client.post( - "/service/{}".format(sample_service.id), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - - result = resp.json - assert resp.status_code == 200 - assert result["data"]["letter_branding"] is None - - def test_update_service_remove_email_branding(admin_request, notify_db, sample_service): brand = EmailBranding(colour="#000000", logo="justice-league.png", name="Justice League") sample_service.email_branding = brand @@ -763,7 +761,10 @@ def test_update_service_sets_volumes( ( (True, 200, True), (False, 200, False), - ("Yes", 400, None), + ("Yes", 200, True), + ("No", 200, False), + ("Oui", 400, None), + ("Non", 400, None), ), ) def test_update_service_sets_research_consent( @@ -1070,12 +1071,11 @@ def test_default_permissions_are_added_for_user_service( ): with notify_api.test_request_context(): with notify_api.test_client() as client: - - zd_send_create_service_mock = mocker.patch("app.user.rest.ZenDeskSell.send_create_service", return_value=True) data = { 
"name": "created service", "user_id": str(sample_user.id), "message_limit": 1000, + "sms_daily_limit": 1000, "restricted": False, "active": False, "email_from": "created.service", @@ -1085,7 +1085,6 @@ def test_default_permissions_are_added_for_user_service( headers = [("Content-Type", "application/json"), auth_header] resp = client.post("/service", data=json.dumps(data), headers=headers) - zd_send_create_service_mock.assert_called() json_resp = resp.json assert resp.status_code == 201 assert json_resp["data"]["id"] @@ -1449,10 +1448,11 @@ def test_add_unknown_user_to_service_returns404(notify_api, notify_db, notify_db assert result["message"] == expected_message -def test_remove_user_from_service(notify_db, notify_db_session, client, sample_user_service_permission): +def test_remove_user_from_service(notify_db, notify_db_session, client, sample_user_service_permission, mocker): + mocked_salesforce_client = mocker.patch("app.service.rest.salesforce_client") second_user = create_user(email="new@digital.cabinet-office.gov.uk") # Simulates successfully adding a user to the service - second_permission = create_user_service_permission(notify_db, notify_db_session, user=second_user) + second_permission = create_sample_user_service_permission(notify_db, notify_db_session, user=second_user) endpoint = url_for( "service.remove_user_from_service", service_id=str(second_permission.service.id), @@ -1461,6 +1461,7 @@ def test_remove_user_from_service(notify_db, notify_db_session, client, sample_u auth_header = create_authorization_header() resp = client.delete(endpoint, headers=[("Content-Type", "application/json"), auth_header]) assert resp.status_code == 204 + mocked_salesforce_client.engagement_delete_contact_role.assert_called_with(second_permission.service, second_permission.user) def test_remove_non_existant_user_from_service(client, sample_user_service_permission): @@ -1493,7 +1494,7 @@ def test_cannot_remove_only_user_from_service(notify_api, notify_db, notify_db_s # This test is just here verify get_service_and_api_key_history that is a temp solution # until proper ui is sorted out on admin app def test_get_service_and_api_key_history(notify_api, notify_db, notify_db_session, sample_service): - from tests.app.conftest import sample_api_key as create_sample_api_key + from tests.app.conftest import create_sample_api_key api_key = create_sample_api_key(notify_db, notify_db_session, service=sample_service) @@ -1550,7 +1551,7 @@ def test_get_all_notifications_for_service_formatted_for_csv(client, sample_temp assert not resp["notifications"][0]["row_number"] assert resp["notifications"][0]["template_name"] == sample_template.name assert resp["notifications"][0]["template_type"] == notification.notification_type - assert resp["notifications"][0]["status"] == "Sending" + assert resp["notifications"][0]["status"] == "In transit" def test_get_notification_for_service_without_uuid(client, notify_db, notify_db_session): @@ -1563,7 +1564,6 @@ def test_get_notification_for_service_without_uuid(client, notify_db, notify_db_ def test_get_notification_for_service(client, notify_db, notify_db_session): - service_1 = create_service(service_name="1", email_from="1") service_2 = create_service(service_name="2", email_from="2") @@ -1590,7 +1590,7 @@ def test_get_notification_for_service(client, notify_db, notify_db_session): ) assert service_2_response.status_code == 404 service_2_response = json.loads(service_2_response.get_data(as_text=True)) - assert service_2_response == {"message": "No result found", "result": 
"error"} + assert service_2_response == {"message": "Notification not found in database", "result": "error"} def test_get_notification_for_service_includes_created_by(admin_request, sample_notification): @@ -1637,7 +1637,7 @@ def test_get_all_notifications_for_service_including_ones_made_by_jobs( include_from_test_key, expected_count_of_notifications, ): - with_job = sample_notification_with_job(notify_db, notify_db_session, service=sample_service) + with_job = create_sample_notification_with_job(notify_db, notify_db_session, service=sample_service) without_job = create_sample_notification(notify_db, notify_db_session, service=sample_service) # from_test_api_key create_sample_notification(notify_db, notify_db_session, service=sample_service, key_type=KEY_TYPE_TEST) @@ -1701,6 +1701,35 @@ def test_get_notifications_for_service_without_page_count( assert resp["notifications"][0]["id"] == str(without_job.id) +@freeze_time("2018-11-20T18:00:00") +@pytest.mark.parametrize("retention_period, expected_count_of_notifications", [(3, 72), (7, 168)]) +def test_get_notifications_for_service_gets_data_from_correct_timeframe( + admin_request, sample_service, retention_period, expected_count_of_notifications +): + email_template = create_template(service=sample_service, template_type=EMAIL_TYPE) + + # WEEK BEFORE + # Create 12 notifications for each hour of the day for 1 week + for i in range(retention_period): + for j in range(24): + save_notification( + create_notification(email_template, created_at=datetime(2018, 11, 5 + i, j, 0, 0), status="delivered") + ) + + # THIS WEEK + # Create 12 notifications for each hour of the day for 1 week + for i in range(retention_period): + for j in range(24): + save_notification( + create_notification(email_template, created_at=datetime(2018, 11, 13 + i, j, 0, 0), status="delivered") + ) + + resp = admin_request.get( + "service.get_all_notifications_for_service", service_id=email_template.service_id, limit_days=7, page_size=1 + ) + assert resp["total"] == expected_count_of_notifications + + @pytest.mark.parametrize( "should_prefix", [ @@ -1995,7 +2024,6 @@ def test_get_detailed_services_for_date_range(sample_template, start_date_delta, def test_search_for_notification_by_to_field(client, sample_template, sample_email_template): - notification1 = save_notification( create_notification(template=sample_template, to_field="+16502532222", normalised_to="+16502532222") ) @@ -2082,9 +2110,10 @@ def test_update_service_calls_send_notification_as_service_becomes_live( data = {"restricted": False} - zd_send_go_live_service_mock = mocker.patch("app.user.rest.ZenDeskSell.send_go_live_service", return_value=True) - fetch_service_creator_mock = mocker.patch("app.service.rest.dao_fetch_service_creator", return_value=user_1) - get_user_by_id_mock = mocker.patch("app.service.rest.get_user_by_id", return_value=user_2) + mocked_salesforce_client = mocker.patch("app.service.rest.salesforce_client") + mocked_fetch_service_creator = mocker.patch("app.service.rest.dao_fetch_service_creator", return_value=user_1) + mocked_get_user_by_id = mocker.patch("app.service.rest.get_user_by_id", return_value=user_2) + auth_header = create_authorization_header() resp = client.post( "service/{}".format(restricted_service.id), @@ -2093,15 +2122,7 @@ def test_update_service_calls_send_notification_as_service_becomes_live( content_type="application/json", ) - zd_send_go_live_service_mock.assert_called_once_with(restricted_service, user_2 if set_go_live_user else user_1) - if set_go_live_user: - 
fetch_service_creator_mock.assert_not_called() - get_user_by_id_mock.assert_called_once_with(restricted_service.go_live_user_id) - else: - get_user_by_id_mock.assert_not_called() - fetch_service_creator_mock.assert_called_once_with(restricted_service.id) - - assert resp.status_code == 200 + assert resp.status_code == 200 # type: ignore send_notification_mock.assert_called_once_with( service_id=restricted_service.id, template_id="618185c6-3636-49cd-b7d2-6f6f5eb3bdde", @@ -2115,6 +2136,17 @@ def test_update_service_calls_send_notification_as_service_becomes_live( include_user_fields=["name"], ) + engagement_user = user_2 if set_go_live_user else user_1 + mocked_salesforce_client.engagement_update.assert_called_once_with( + restricted_service, engagement_user, {"StageName": ENGAGEMENT_STAGE_LIVE} + ) + if set_go_live_user: + mocked_fetch_service_creator.assert_not_called() + mocked_get_user_by_id.assert_called_once_with(restricted_service.go_live_user_id) + else: + mocked_fetch_service_creator.assert_called_once_with(restricted_service.id) + mocked_get_user_by_id.assert_not_called() + @pytest.mark.parametrize( "current_limit, new_limit, expected_call", @@ -2151,6 +2183,8 @@ def test_update_service_updating_daily_limit_clears_redis_cache( call(daily_limit_cache_key(service.id)), call(near_daily_limit_cache_key(service.id)), call(over_daily_limit_cache_key(service.id)), + call(near_email_daily_limit_cache_key(service.id)), + call(over_email_daily_limit_cache_key(service.id)), ] else: redis_delete.assert_not_called() @@ -2191,7 +2225,7 @@ def test_update_service_updating_daily_limit_sends_notification_to_users( if expected_call: send_notification_mock.assert_called_once_with( service_id=service.id, - template_id=current_app.config["DAILY_LIMIT_UPDATED_TEMPLATE_ID"], + template_id=current_app.config["DAILY_EMAIL_LIMIT_UPDATED_TEMPLATE_ID"], personalisation={ "service_name": service.name, "message_limit_en": "{:,}".format(new_limit), @@ -2237,6 +2271,24 @@ def test_update_service_does_not_call_send_notification_when_restricted_not_chan assert not send_notification_mock.called +def test_update_service_name_updates_salesforce_engagement(sample_service, client, mocker): + user = create_user(email="active1@foo.com", state="active") + mocked_salesforce_client = mocker.patch("app.service.rest.salesforce_client") + mocker.patch("app.service.rest.dao_fetch_service_creator", return_value=user) + + data = {"name": "New service name"} + + auth_header = create_authorization_header() + resp = client.post( + "service/{}".format(sample_service.id), + data=json.dumps(data), + headers=[auth_header], + content_type="application/json", + ) + assert resp.status_code == 200 + mocked_salesforce_client.engagement_update.assert_called_once_with(sample_service, user, {"Name": "New service name"}) + + def test_search_for_notification_by_to_field_filters_by_status(client, notify_db, notify_db_session): create_notification = partial( create_sample_notification, @@ -2334,25 +2386,6 @@ def test_send_one_off_notification(sample_service, admin_request, mocker): assert response["id"] == str(noti.id) -def test_create_pdf_letter(mocker, sample_service_full_permissions, client, fake_uuid, notify_user): - mocker.patch("app.service.send_notification.utils_s3download") - mocker.patch("app.service.send_notification.get_page_count", return_value=1) - mocker.patch("app.service.send_notification.move_uploaded_pdf_to_letters_bucket") - - user = sample_service_full_permissions.users[0] - data = json.dumps({"filename": "valid.pdf", 
"created_by": str(user.id), "file_id": fake_uuid}) - - response = client.post( - url_for("service.create_pdf_letter", service_id=sample_service_full_permissions.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - json_resp = json.loads(response.get_data(as_text=True)) - - assert response.status_code == 201 - assert json_resp == {"id": fake_uuid} - - def test_get_notification_for_service_includes_template_redacted(admin_request, sample_notification): resp = admin_request.get( "service.get_notification_for_service", @@ -2821,23 +2854,38 @@ def test_delete_service_reply_to_email_address_archives_an_email_reply_to(sample assert reply_to.archived is True -def test_delete_service_reply_to_email_address_returns_400_if_archiving_default_reply_to( +def test_delete_service_reply_to_email_address_archives_default_reply_to_if_no_others_exist( admin_request, notify_db_session, sample_service ): reply_to = create_reply_to_email(service=sample_service, email_address="some@email.com") - response = admin_request.post( + admin_request.post( "service.delete_service_reply_to_email_address", service_id=sample_service.id, reply_to_email_id=reply_to.id, + ) + + assert reply_to.archived is True + + +def test_delete_service_reply_to_email_address_returns_400_if_archiving_default_reply_to_and_others_exist( + admin_request, notify_db_session, sample_service +): + reply_to_1 = create_reply_to_email(service=sample_service, email_address="some_1@email.com") + create_reply_to_email(service=sample_service, email_address="some_2@email.com") + + response = admin_request.post( + "service.delete_service_reply_to_email_address", + service_id=sample_service.id, + reply_to_email_id=reply_to_1.id, _expected_status=400, ) assert response == { - "message": "You cannot delete a default email reply to address", + "message": "You cannot delete a default email reply to address if other reply to addresses exist", "result": "error", } - assert reply_to.archived is False + assert reply_to_1.archived is False def test_get_email_reply_to_address(client, notify_db, notify_db_session): @@ -2853,218 +2901,6 @@ def test_get_email_reply_to_address(client, notify_db, notify_db_session): assert json.loads(response.get_data(as_text=True)) == reply_to.serialize() -def test_get_letter_contacts_when_there_are_no_letter_contacts(client, sample_service): - response = client.get( - "/service/{}/letter-contact".format(sample_service.id), - headers=[create_authorization_header()], - ) - - assert json.loads(response.get_data(as_text=True)) == [] - assert response.status_code == 200 - - -def test_get_letter_contacts_with_one_letter_contact(client, notify_db, notify_db_session): - service = create_service() - create_letter_contact(service, "Aberdeen, AB23 1XH") - - response = client.get( - "/service/{}/letter-contact".format(service.id), - headers=[create_authorization_header()], - ) - json_response = json.loads(response.get_data(as_text=True)) - - assert len(json_response) == 1 - assert json_response[0]["contact_block"] == "Aberdeen, AB23 1XH" - assert json_response[0]["is_default"] - assert json_response[0]["created_at"] - assert not json_response[0]["updated_at"] - assert response.status_code == 200 - - -def test_get_letter_contacts_with_multiple_letter_contacts(client, notify_db, notify_db_session): - service = create_service() - letter_contact_a = create_letter_contact(service, "Aberdeen, AB23 1XH") - letter_contact_b = create_letter_contact(service, "London, E1 8QS", False) - - response = client.get( 
- "/service/{}/letter-contact".format(service.id), - headers=[create_authorization_header()], - ) - json_response = json.loads(response.get_data(as_text=True)) - - assert len(json_response) == 2 - assert response.status_code == 200 - - assert json_response[0]["id"] == str(letter_contact_a.id) - assert json_response[0]["service_id"] == str(letter_contact_a.service_id) - assert json_response[0]["contact_block"] == "Aberdeen, AB23 1XH" - assert json_response[0]["is_default"] - assert json_response[0]["created_at"] - assert not json_response[0]["updated_at"] - - assert json_response[1]["id"] == str(letter_contact_b.id) - assert json_response[1]["service_id"] == str(letter_contact_b.service_id) - assert json_response[1]["contact_block"] == "London, E1 8QS" - assert not json_response[1]["is_default"] - assert json_response[1]["created_at"] - assert not json_response[1]["updated_at"] - - -def test_get_letter_contact_by_id(client, notify_db, notify_db_session): - service = create_service() - letter_contact = create_letter_contact(service, "London, E1 8QS") - - response = client.get( - "/service/{}/letter-contact/{}".format(service.id, letter_contact.id), - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 200 - assert json.loads(response.get_data(as_text=True)) == letter_contact.serialize() - - -def test_get_letter_contact_return_404_when_invalid_contact_id(client, notify_db, notify_db_session): - service = create_service() - - response = client.get( - "/service/{}/letter-contact/{}".format(service.id, "93d59f88-4aa1-453c-9900-f61e2fc8a2de"), - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 404 - - -def test_add_service_contact_block(client, sample_service): - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - response = client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 201 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 1 - assert json_resp["data"] == results[0].serialize() - - -def test_add_service_letter_contact_can_add_multiple_addresses(client, sample_service): - first = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=first, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - second = json.dumps({"contact_block": "Aberdeen, AB23 1XH", "is_default": True}) - response = client.post( - "/service/{}/letter-contact".format(sample_service.id), - data=second, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - assert response.status_code == 201 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 2 - default = [x for x in results if x.is_default] - assert json_resp["data"] == default[0].serialize() - first_letter_contact_not_default = [x for x in results if not x.is_default] - assert first_letter_contact_not_default[0].contact_block == "London, E1 8QS" - - -def test_add_service_letter_contact_block_fine_if_no_default(client, sample_service): - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": False}) - response = client.post( - 
"/service/{}/letter-contact".format(sample_service.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - assert response.status_code == 201 - - -def test_add_service_letter_contact_block_404s_when_invalid_service_id(client, notify_db, notify_db_session): - response = client.post( - "/service/{}/letter-contact".format(uuid.uuid4()), - data={}, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 404 - result = json.loads(response.get_data(as_text=True)) - assert result["result"] == "error" - assert result["message"] == "No result found" - - -def test_update_service_letter_contact(client, sample_service): - original_letter_contact = create_letter_contact(service=sample_service, contact_block="Aberdeen, AB23 1XH") - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - response = client.post( - "/service/{}/letter-contact/{}".format(sample_service.id, original_letter_contact.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 200 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 1 - assert json_resp["data"] == results[0].serialize() - - -def test_update_service_letter_contact_returns_200_when_no_default(client, sample_service): - original_reply_to = create_letter_contact(service=sample_service, contact_block="Aberdeen, AB23 1XH") - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": False}) - response = client.post( - "/service/{}/letter-contact/{}".format(sample_service.id, original_reply_to.id), - data=data, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - assert response.status_code == 200 - - -def test_update_service_letter_contact_returns_404_when_invalid_service_id(client, notify_db, notify_db_session): - response = client.post( - "/service/{}/letter-contact/{}".format(uuid.uuid4(), uuid.uuid4()), - data={}, - headers=[("Content-Type", "application/json"), create_authorization_header()], - ) - - assert response.status_code == 404 - result = json.loads(response.get_data(as_text=True)) - assert result["result"] == "error" - assert result["message"] == "No result found" - - -def test_delete_service_letter_contact_can_archive_letter_contact(admin_request, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Edinburgh, ED1 1AA") - letter_contact = create_letter_contact(service=service, contact_block="Swansea, SN1 3CC", is_default=False) - - admin_request.post( - "service.delete_service_letter_contact", - service_id=service.id, - letter_contact_id=letter_contact.id, - ) - - assert letter_contact.archived is True - - -def test_delete_service_letter_contact_returns_200_if_archiving_template_default(admin_request, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Edinburgh, ED1 1AA") - letter_contact = create_letter_contact(service=service, contact_block="Swansea, SN1 3CC", is_default=False) - create_template(service=service, template_type="letter", reply_to=letter_contact.id) - - response = admin_request.post( - "service.delete_service_letter_contact", - service_id=service.id, - letter_contact_id=letter_contact.id, - _expected_status=200, - ) - assert response["data"]["archived"] is True - - def 
test_add_service_sms_sender_can_add_multiple_senders(client, notify_db_session): service = create_service() data = { @@ -3370,91 +3206,6 @@ def test_cancel_notification_for_service_raises_invalid_request_when_notificatio assert response["result"] == "error" -@pytest.mark.parametrize( - "notification_status", - [ - "cancelled", - "sending", - "sent", - "delivered", - "pending", - "failed", - "technical-failure", - "temporary-failure", - "permanent-failure", - "validation-failed", - "virus-scan-failed", - "returned-letter", - ], -) -@freeze_time("2018-07-07 12:00:00") -def test_cancel_notification_for_service_raises_invalid_request_when_letter_is_in_wrong_state_to_be_cancelled( - admin_request, - sample_letter_notification, - notification_status, -): - sample_letter_notification.status = notification_status - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - _expected_status=400, - ) - assert response["message"] == "It’s too late to cancel this letter. Printing started today at 5.30pm" - assert response["result"] == "error" - - -@pytest.mark.parametrize("notification_status", ["created", "pending-virus-check"]) -@freeze_time("2018-07-07 16:00:00") -def test_cancel_notification_for_service_updates_letter_if_letter_is_in_cancellable_state( - admin_request, - sample_letter_notification, - notification_status, -): - sample_letter_notification.status = notification_status - sample_letter_notification.created_at = datetime.now() - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - ) - assert response["status"] == "cancelled" - - -@freeze_time("2017-12-12 17:30:00") -def test_cancel_notification_for_service_raises_error_if_its_too_late_to_cancel( - admin_request, - sample_letter_notification, -): - sample_letter_notification.created_at = datetime(2017, 12, 11, 17, 0) - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - _expected_status=400, - ) - assert response["message"] == "It’s too late to cancel this letter. 
Printing started on 11 December at 5.30pm" - assert response["result"] == "error" - - -@freeze_time("2018-7-7 16:00:00") -def test_cancel_notification_for_service_updates_letter_if_still_time_to_cancel( - admin_request, - sample_letter_notification, -): - sample_letter_notification.created_at = datetime(2018, 7, 7, 10, 0) - - response = admin_request.post( - "service.cancel_notification_for_service", - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - ) - assert response["status"] == "cancelled" - - def test_get_monthly_notification_data_by_service(mocker, admin_request): dao_mock = mocker.patch( "app.service.rest.fact_notification_status_dao.fetch_monthly_notification_statuses_per_service", diff --git a/tests/app/service/test_send_one_off_notification.py b/tests/app/service/test_send_one_off_notification.py index 1c703da59e..b9dc37cfd5 100644 --- a/tests/app/service/test_send_one_off_notification.py +++ b/tests/app/service/test_send_one_off_notification.py @@ -5,6 +5,7 @@ from notifications_utils import SMS_CHAR_COUNT_LIMIT from notifications_utils.recipients import InvalidPhoneError +from app.config import QueueNames from app.dao.service_safelist_dao import dao_add_and_commit_safelisted_contacts from app.models import ( EMAIL_TYPE, @@ -16,7 +17,11 @@ ServiceSafelist, ) from app.service.send_notification import send_one_off_notification -from app.v2.errors import BadRequestError, TooManyRequestsError +from app.v2.errors import ( + BadRequestError, + LiveServiceTooManyEmailRequestsError, + LiveServiceTooManySMSRequestsError, +) from tests.app.db import ( create_letter_contact, create_reply_to_email, @@ -54,7 +59,9 @@ def test_send_one_off_notification_calls_celery_correctly(persist_mock, celery_m assert resp == {"id": str(persist_mock.return_value.id)} - celery_mock.assert_called_once_with(notification=persist_mock.return_value, research_mode=False, queue=None) + celery_mock.assert_called_once_with( + notification=persist_mock.return_value, research_mode=False, queue=QueueNames.SEND_SMS_MEDIUM + ) def test_send_one_off_notification_calls_persist_correctly_for_sms(persist_mock, celery_mock, notify_db_session): @@ -183,10 +190,37 @@ def test_send_one_off_notification_honors_research_mode(notify_db_session, persi assert celery_mock.call_args[1]["research_mode"] is True -@pytest.mark.parametrize("process_type", ["priority", "bulk"]) -def test_send_one_off_notification_honors_process_type(notify_db_session, persist_mock, celery_mock, process_type): +@pytest.mark.parametrize( + "process_type, expected_queue", + [("priority", QueueNames.SEND_EMAIL_HIGH), ("bulk", QueueNames.SEND_EMAIL_MEDIUM), ("normal", QueueNames.SEND_EMAIL_MEDIUM)], +) +def test_send_one_off_email_notification_honors_process_type( + notify_db_session, persist_mock, celery_mock, process_type, expected_queue +): service = create_service() - template = create_template(service=service) + template = create_template(service=service, template_type=EMAIL_TYPE) + template.process_type = process_type + + post_data = { + "template_id": str(template.id), + "to": "test@test.com", + "created_by": str(service.created_by_id), + } + + send_one_off_notification(service.id, post_data) + + assert celery_mock.call_args[1]["queue"] == expected_queue + + +@pytest.mark.parametrize( + "process_type, expected_queue", + [("priority", QueueNames.SEND_SMS_HIGH), ("bulk", QueueNames.SEND_SMS_MEDIUM), ("normal", QueueNames.SEND_SMS_MEDIUM)], +) +def test_send_one_off_sms_notification_honors_process_type( + 
notify_db_session, persist_mock, celery_mock, process_type, expected_queue +): + service = create_service() + template = create_template(service=service, template_type=SMS_TYPE) template.process_type = process_type post_data = { @@ -197,7 +231,7 @@ def test_send_one_off_notification_honors_process_type(notify_db_session, persis send_one_off_notification(service.id, post_data) - assert celery_mock.call_args[1]["queue"] == f"{process_type}-tasks" + assert celery_mock.call_args[1]["queue"] == expected_queue def test_send_one_off_notification_raises_if_invalid_recipient(notify_db_session): @@ -246,12 +280,48 @@ def test_send_one_off_notification_raises_if_cant_send_to_recipient( assert "service is in trial mode" in e.value.message -def test_send_one_off_notification_raises_if_over_limit(notify_db_session, mocker): +def test_send_one_off_notification_raises_if_over_combined_limit(notify_db_session, notify_api, mocker): + service = create_service(message_limit=0) + template = create_template(service=service) + mocker.patch( + "app.service.send_notification.check_sms_daily_limit", + side_effect=LiveServiceTooManySMSRequestsError(1), + ) + + post_data = { + "template_id": str(template.id), + "to": "6502532222", + "created_by": str(service.created_by_id), + } + + with pytest.raises(LiveServiceTooManySMSRequestsError): + send_one_off_notification(service.id, post_data) + + +def test_send_one_off_notification_raises_if_over_email_limit(notify_db_session, notify_api, mocker): service = create_service(message_limit=0) + template = create_template(service=service, template_type=EMAIL_TYPE) + mocker.patch( + "app.service.send_notification.check_email_daily_limit", + side_effect=LiveServiceTooManyEmailRequestsError(1), + ) + + post_data = { + "template_id": str(template.id), + "to": "6502532222", + "created_by": str(service.created_by_id), + } + + with pytest.raises(LiveServiceTooManyEmailRequestsError): + send_one_off_notification(service.id, post_data) + + +def test_send_one_off_notification_raises_if_over_sms_daily_limit(notify_db_session, mocker): + service = create_service(sms_daily_limit=0) template = create_template(service=service) mocker.patch( - "app.service.send_notification.check_service_over_daily_message_limit", - side_effect=TooManyRequestsError(1), + "app.service.send_notification.check_sms_daily_limit", + side_effect=LiveServiceTooManySMSRequestsError(1), ) post_data = { @@ -260,7 +330,7 @@ def test_send_one_off_notification_raises_if_over_limit(notify_db_session, mocke "created_by": str(service.created_by_id), } - with pytest.raises(TooManyRequestsError): + with pytest.raises(LiveServiceTooManySMSRequestsError): send_one_off_notification(service.id, post_data) @@ -309,7 +379,7 @@ def test_send_one_off_notification_should_add_email_reply_to_text_for_notificati notification_id = send_one_off_notification(service_id=sample_email_template.service.id, post_data=data) notification = Notification.query.get(notification_id["id"]) - celery_mock.assert_called_once_with(notification=notification, research_mode=False, queue=None) + celery_mock.assert_called_once_with(notification=notification, research_mode=False, queue=QueueNames.SEND_EMAIL_MEDIUM) assert notification.reply_to_text == reply_to_email.email_address @@ -325,7 +395,7 @@ def test_send_one_off_letter_notification_should_use_template_reply_to_text(samp notification_id = send_one_off_notification(service_id=sample_letter_template.service.id, post_data=data) notification = Notification.query.get(notification_id["id"]) - 
celery_mock.assert_called_once_with(notification=notification, research_mode=False, queue=None) + celery_mock.assert_called_once_with(notification=notification, research_mode=False, queue=QueueNames.NORMAL) assert notification.reply_to_text == "Edinburgh, ED1 1AA" @@ -333,7 +403,6 @@ def test_send_one_off_letter_notification_should_use_template_reply_to_text(samp def test_send_one_off_letter_should_not_make_pdf_in_research_mode( sample_letter_template, ): - sample_letter_template.service.research_mode = True data = { @@ -361,7 +430,7 @@ def test_send_one_off_sms_notification_should_use_sms_sender_reply_to_text(sampl notification_id = send_one_off_notification(service_id=sample_service.id, post_data=data) notification = Notification.query.get(notification_id["id"]) - celery_mock.assert_called_once_with(notification=notification, research_mode=False, queue=None) + celery_mock.assert_called_once_with(notification=notification, research_mode=False, queue=QueueNames.SEND_SMS_MEDIUM) assert notification.reply_to_text == "+16502532222" @@ -379,7 +448,7 @@ def test_send_one_off_sms_notification_should_use_default_service_reply_to_text( notification_id = send_one_off_notification(service_id=sample_service.id, post_data=data) notification = Notification.query.get(notification_id["id"]) - celery_mock.assert_called_once_with(notification=notification, research_mode=False, queue=None) + celery_mock.assert_called_once_with(notification=notification, research_mode=False, queue=QueueNames.SEND_SMS_MEDIUM) assert notification.reply_to_text == "+16502532222" diff --git a/tests/app/service/test_send_pdf_letter_notification.py b/tests/app/service/test_send_pdf_letter_notification.py deleted file mode 100644 index b236c83cab..0000000000 --- a/tests/app/service/test_send_pdf_letter_notification.py +++ /dev/null @@ -1,111 +0,0 @@ -import uuid - -import pytest -from freezegun import freeze_time -from notifications_utils.s3 import S3ObjectNotFound - -from app.dao.notifications_dao import get_notification_by_id -from app.models import EMAIL_TYPE, LETTER_TYPE, UPLOAD_LETTERS -from app.service.send_notification import send_pdf_letter_notification -from app.v2.errors import BadRequestError, TooManyRequestsError -from tests.app.db import create_service - - -@pytest.mark.parametrize( - "permissions", - [ - [EMAIL_TYPE], - [LETTER_TYPE], - [UPLOAD_LETTERS], - ], -) -def test_send_pdf_letter_notification_raises_error_if_service_does_not_have_permission( - notify_db_session, - fake_uuid, - permissions, -): - service = create_service(service_permissions=permissions) - post_data = {"filename": "valid.pdf", "created_by": fake_uuid, "file_id": fake_uuid} - - with pytest.raises(BadRequestError): - send_pdf_letter_notification(service.id, post_data) - - -def test_send_pdf_letter_notification_raises_error_if_service_is_over_daily_message_limit( - mocker, - sample_service_full_permissions, - fake_uuid, -): - mocker.patch( - "app.service.send_notification.check_service_over_daily_message_limit", - side_effect=TooManyRequestsError(10), - ) - post_data = {"filename": "valid.pdf", "created_by": fake_uuid, "file_id": fake_uuid} - - with pytest.raises(TooManyRequestsError): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -def test_send_pdf_letter_notification_validates_created_by(sample_service_full_permissions, fake_uuid, sample_user): - post_data = { - "filename": "valid.pdf", - "created_by": sample_user.id, - "file_id": fake_uuid, - } - - with pytest.raises(BadRequestError): - 
send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -def test_send_pdf_letter_notification_raises_error_when_pdf_is_not_in_transient_letter_bucket( - mocker, - sample_service_full_permissions, - fake_uuid, - notify_user, -): - user = sample_service_full_permissions.users[0] - post_data = {"filename": "valid.pdf", "created_by": user.id, "file_id": fake_uuid} - mocker.patch( - "app.service.send_notification.utils_s3download", - side_effect=S3ObjectNotFound({}, ""), - ) - - with pytest.raises(S3ObjectNotFound): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -@freeze_time("2019-08-02 11:00:00") -def test_send_pdf_letter_notification_creates_notification_and_moves_letter( - mocker, - sample_service_full_permissions, - notify_user, -): - user = sample_service_full_permissions.users[0] - filename = "valid.pdf" - file_id = uuid.uuid4() - post_data = {"filename": filename, "created_by": user.id, "file_id": file_id} - - mocker.patch("app.service.send_notification.utils_s3download") - mocker.patch("app.service.send_notification.get_page_count", return_value=1) - s3_mock = mocker.patch("app.service.send_notification.move_uploaded_pdf_to_letters_bucket") - - result = send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - notification = get_notification_by_id(file_id) - - assert notification.id == file_id - assert notification.api_key_id is None - assert notification.client_reference == filename - assert notification.created_by_id == user.id - assert notification.postage == "second" - assert notification.notification_type == LETTER_TYPE - assert notification.billable_units == 1 - assert notification.to == filename - assert notification.service_id == sample_service_full_permissions.id - - assert result == {"id": str(notification.id)} - - s3_mock.assert_called_once_with( - "service-{}/{}.pdf".format(sample_service_full_permissions.id, file_id), - "2019-08-02/NOTIFY.{}.D.2.C.C.20190802110000.PDF".format(notification.reference), - ) diff --git a/tests/app/service/test_sender.py b/tests/app/service/test_sender.py index 1c48380c9c..64b1f0007e 100644 --- a/tests/app/service/test_sender.py +++ b/tests/app/service/test_sender.py @@ -4,8 +4,8 @@ from app.dao.services_dao import dao_add_user_to_service from app.models import EMAIL_TYPE, SMS_TYPE, Notification from app.service.sender import send_notification_to_service_users +from tests.app.conftest import create_sample_service from tests.app.conftest import notify_service as create_notify_service -from tests.app.conftest import sample_service as create_sample_service from tests.app.db import create_template, create_user diff --git a/tests/app/service/test_service_whitelist.py b/tests/app/service/test_service_whitelist.py index bea0ed07a3..573486b933 100644 --- a/tests/app/service/test_service_whitelist.py +++ b/tests/app/service/test_service_whitelist.py @@ -78,7 +78,6 @@ def test_update_safelist_replaces_old_safelist(client, sample_service_safelist): def test_update_safelist_doesnt_remove_old_safelist_if_error(client, sample_service_safelist): - data = {"email_addresses": [""], "phone_numbers": ["6502532222"]} response = client.put( diff --git a/tests/app/service/test_statistics.py b/tests/app/service/test_statistics.py index 66ff3016a4..ab3ef7968d 100644 --- a/tests/app/service/test_statistics.py +++ b/tests/app/service/test_statistics.py @@ -73,7 +73,6 @@ }, ) def test_format_statistics(stats, email_counts, sms_counts, letter_counts): - ret = format_statistics(stats) assert 
ret["email"] == {status: count for status, count in zip(["requested", "delivered", "failed"], email_counts)} diff --git a/tests/app/service/test_utils.py b/tests/app/service/test_utils.py index c0fda02a3f..8342286b7e 100644 --- a/tests/app/service/test_utils.py +++ b/tests/app/service/test_utils.py @@ -1,6 +1,8 @@ +import pytest from freezegun import freeze_time from app.dao.date_util import get_current_financial_year_start_year +from app.service.utils import get_organisation_id_from_crm_org_notes # see get_financial_year for conversion of financial years. @@ -15,3 +17,25 @@ def test_get_current_financial_year_start_year_before_march(): def test_get_current_financial_year_start_year_after_april(): current_fy = get_current_financial_year_start_year() assert current_fy == 2017 + + +@pytest.mark.parametrize( + "org_notes, expected_id", + [ + ("en_name_1 > xyz", "id1b"), + ("fr_name_1 > xyz", "id1b"), + ("en_name_2 > ", "id2b"), + ("fr_name_5 > ", None), + ("en_name_5 > xyz", None), + ("en_name_3 > xyz", None), + ("fr_name_3 > ", None), + ], +) +def test_get_organisation_id_from_crm_org_notes(mocker, org_notes, expected_id): + mock_gc_org_data = [ + {"id": "id1a", "name_eng": "en_name_1", "name_fra": "fr_name_1", "notify_organisation_id": "id1b"}, + {"id": "id2a", "name_eng": "en_name_2", "name_fra": "fr_name_2", "notify_organisation_id": "id2b"}, + {"id": "id3a", "name_eng": "en_name_3", "name_fra": "fr_name_3", "notify_organisation_id": None}, + ] + mocker.patch("app.service.utils.get_gc_organisation_data", return_value=mock_gc_org_data) + assert get_organisation_id_from_crm_org_notes(org_notes) == expected_id diff --git a/tests/app/status/test_status.py b/tests/app/status/test_status.py index 4bdfb56443..70973b32bb 100644 --- a/tests/app/status/test_status.py +++ b/tests/app/status/test_status.py @@ -13,6 +13,7 @@ def test_get_status_all_ok(client, notify_db_session, path): assert resp_json["db_version"] assert resp_json["commit_sha"] assert resp_json["build_time"] + assert resp_json["current_time_utc"] def test_empty_live_service_and_organisation_counts(admin_request): @@ -23,7 +24,6 @@ def test_empty_live_service_and_organisation_counts(admin_request): def test_populated_live_service_and_organisation_counts(admin_request): - # Org 1 has three real live services and one fake, for a total of 3 org_1 = create_organisation("org 1") live_service_1 = create_service(service_name="1") diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py index 58c0ef464c..142c5aefa3 100644 --- a/tests/app/template/test_rest.py +++ b/tests/app/template/test_rest.py @@ -1,7 +1,5 @@ import base64 import json -import random -import string import uuid from datetime import datetime, timedelta @@ -9,22 +7,29 @@ import pytest import requests_mock from freezegun import freeze_time -from notifications_utils import SMS_CHAR_COUNT_LIMIT +from notifications_utils import ( + EMAIL_CHAR_COUNT_LIMIT, + SMS_CHAR_COUNT_LIMIT, + TEMPLATE_NAME_CHAR_COUNT_LIMIT, +) from PyPDF2.utils import PdfReadError +from app.dao.organisation_dao import dao_update_organisation from app.dao.service_permissions_dao import dao_add_service_permission from app.dao.templates_dao import dao_get_template_by_id, dao_redact_template from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE, Template, TemplateHistory +from app.template.rest import should_template_be_redacted from tests import create_authorization_header -from tests.app.conftest import sample_template as create_sample_template from tests.app.conftest import ( - 
sample_template_without_email_permission, - sample_template_without_letter_permission, - sample_template_without_sms_permission, + create_sample_template, + create_sample_template_without_email_permission, + create_sample_template_without_letter_permission, + create_sample_template_without_sms_permission, ) from tests.app.db import ( create_letter_contact, create_notification, + create_organisation, create_service, create_template, create_template_folder, @@ -85,7 +90,7 @@ def test_should_create_a_new_template_for_a_service(client, sample_user, templat template = Template.query.get(json_resp["data"]["id"]) from app.schemas import template_schema - assert sorted(json_resp["data"]) == sorted(template_schema.dump(template).data) + assert sorted(json_resp["data"]) == sorted(template_schema.dump(template)) def test_create_a_new_template_for_a_service_adds_folder_relationship(client, sample_service): @@ -263,15 +268,15 @@ def test_should_raise_error_on_create_if_no_permission(client, sample_user, perm "template_factory, expected_error", [ ( - sample_template_without_sms_permission, + create_sample_template_without_sms_permission, {"template_type": ["Updating text message templates is not allowed"]}, ), ( - sample_template_without_email_permission, + create_sample_template_without_email_permission, {"template_type": ["Updating email templates is not allowed"]}, ), ( - sample_template_without_letter_permission, + create_sample_template_without_letter_permission, {"template_type": ["Updating letter templates is not allowed"]}, ), ], @@ -407,6 +412,25 @@ def test_should_be_able_to_archive_template(client, sample_template): assert Template.query.first().archived +def test_should_be_able_to_archive_template_should_remove_template_folders(client, sample_service): + template_folder = create_template_folder(service=sample_service) + template = create_template(service=sample_service, folder=template_folder) + + data = { + "archived": True, + } + + client.post( + f"/service/{sample_service.id}/template/{template.id}", + headers=[("Content-Type", "application/json"), create_authorization_header()], + data=json.dumps(data), + ) + + updated_template = Template.query.get(template.id) + assert updated_template.archived + assert not updated_template.folder + + def test_get_precompiled_template_for_service( client, notify_user, @@ -533,7 +557,6 @@ def test_should_get_only_templates_for_that_service(admin_request, notify_db_ses ], ) def test_should_get_a_single_template(notify_db, client, sample_user, service_factory, subject, content, template_type): - template = create_sample_template( notify_db, notify_db.session, @@ -605,7 +628,6 @@ def test_should_preview_a_single_template( expected_content, expected_error, ): - template = create_sample_template( notify_db, notify_db.session, @@ -631,7 +653,6 @@ def test_should_preview_a_single_template( def test_should_return_empty_array_if_no_templates_for_service(client, sample_service): - auth_header = create_authorization_header() response = client.get("/service/{}/template".format(sample_service.id), headers=[auth_header]) @@ -642,7 +663,6 @@ def test_should_return_empty_array_if_no_templates_for_service(client, sample_se def test_should_return_404_if_no_templates_for_service_with_id(client, sample_service, fake_uuid): - auth_header = create_authorization_header() response = client.get( @@ -656,15 +676,88 @@ def test_should_return_404_if_no_templates_for_service_with_id(client, sample_se assert json_resp["message"] == "No result found" -def 
test_create_400_for_over_limit_content(client, notify_api, sample_user, sample_service, fake_uuid): - content = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(SMS_CHAR_COUNT_LIMIT + 1)) +@pytest.mark.parametrize( + "template_type, char_count_limit", [(SMS_TYPE, TEMPLATE_NAME_CHAR_COUNT_LIMIT), (EMAIL_TYPE, TEMPLATE_NAME_CHAR_COUNT_LIMIT)] +) +def test_update_template_400_for_over_limit_name( + client, mocker, sample_user, sample_service, sample_template, template_type, char_count_limit +): + mocked_update_template = mocker.patch("app.dao.templates_dao.dao_update_template") + name = "x" * (char_count_limit + 1) + template_data = { + "id": str(sample_template.id), + "name": name, + "template_type": template_type, + "content": "some content here :)", + "service": str(sample_service.id), + "created_by": str(sample_user.id), + } + if template_type == EMAIL_TYPE: + template_data.update({"subject": "subject"}) + request_data = json.dumps(template_data) + auth_header = create_authorization_header() + + response = client.post( + "/service/{}/template/{}".format(sample_service.id, sample_template.id), + headers=[("Content-Type", "application/json"), auth_header], + data=request_data, + ) + assert response.status_code == 400 + json_response = json.loads(response.get_data(as_text=True)) + assert (f"Template name must be less than {char_count_limit} characters") in json_response["message"]["name"] + mocked_update_template.assert_not_called() + + +@pytest.mark.parametrize( + "template_type, char_count_limit", [(SMS_TYPE, TEMPLATE_NAME_CHAR_COUNT_LIMIT), (EMAIL_TYPE, TEMPLATE_NAME_CHAR_COUNT_LIMIT)] +) +def test_create_template_400_for_over_limit_name(client, mocker, sample_user, sample_service, template_type, char_count_limit): + mocked_update_template = mocker.patch("app.dao.templates_dao.dao_create_template") + name = "x" * (char_count_limit + 1) + template_data = { + "name": name, + "template_type": template_type, + "content": "some content here :)", + "service": str(sample_service.id), + "created_by": str(sample_user.id), + } + if template_type == EMAIL_TYPE: + template_data.update({"subject": "subject"}) + request_data = json.dumps(template_data) + auth_header = create_authorization_header() + + response = client.post( + "/service/{}/template".format(sample_service.id), + headers=[("Content-Type", "application/json"), auth_header], + data=request_data, + ) + assert response.status_code == 400 + json_response = json.loads(response.get_data(as_text=True)) + assert (f"Template name must be less than {char_count_limit} characters") in json_response["message"]["name"] + mocked_update_template.assert_not_called() + + +@pytest.mark.parametrize( + "template_type, char_count_limit", + [ + (SMS_TYPE, SMS_CHAR_COUNT_LIMIT), + (EMAIL_TYPE, EMAIL_CHAR_COUNT_LIMIT), + ], +) +def test_create_400_for_over_limit_content( + client, notify_api, sample_user, sample_service, fake_uuid, template_type, char_count_limit +): + content = "x" * (char_count_limit + 1) data = { "name": "too big template", - "template_type": SMS_TYPE, + "template_type": template_type, "content": content, "service": str(sample_service.id), "created_by": str(sample_user.id), } + + if template_type == EMAIL_TYPE: + data.update({"subject": "subject"}) data = json.dumps(data) auth_header = create_authorization_header() @@ -675,29 +768,36 @@ def test_create_400_for_over_limit_content(client, notify_api, sample_user, samp ) assert response.status_code == 400 json_resp = json.loads(response.get_data(as_text=True)) - assert 
("Content has a character count greater than the limit of {}").format(SMS_CHAR_COUNT_LIMIT) in json_resp["message"][ - "content" - ] + assert (f"Content has a character count greater than the limit of {char_count_limit}") in json_resp["message"]["content"] -def test_update_400_for_over_limit_content(client, notify_api, sample_user, sample_template): +@pytest.mark.parametrize( + "template_type, char_count_limit", + [ + (SMS_TYPE, SMS_CHAR_COUNT_LIMIT), + (EMAIL_TYPE, EMAIL_CHAR_COUNT_LIMIT), + ], +) +def test_update_400_for_over_limit_content( + client, notify_db, notify_db_session, notify_api, sample_user, template_type, char_count_limit +): json_data = json.dumps( { - "content": "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(SMS_CHAR_COUNT_LIMIT + 1)), + "content": "x" * (char_count_limit + 1), "created_by": str(sample_user.id), } ) auth_header = create_authorization_header() + + sample_template = create_sample_template(notify_db, notify_db_session, template_type=template_type) resp = client.post( - "/service/{}/template/{}".format(sample_template.service.id, sample_template.id), + f"/service/{sample_template.service.id}/template/{sample_template.id}", headers=[("Content-Type", "application/json"), auth_header], data=json_data, ) assert resp.status_code == 400 json_resp = json.loads(resp.get_data(as_text=True)) - assert ("Content has a character count greater than the limit of {}").format(SMS_CHAR_COUNT_LIMIT) in json_resp["message"][ - "content" - ] + assert (f"Content has a character count greater than the limit of {char_count_limit}") in json_resp["message"]["content"] def test_should_return_all_template_versions_for_service_and_template_id(client, sample_template): @@ -727,7 +827,6 @@ def test_should_return_all_template_versions_for_service_and_template_id(client, def test_update_does_not_create_new_version_when_there_is_no_change(client, sample_template): - auth_header = create_authorization_header() data = { "template_type": sample_template.template_type, @@ -786,7 +885,7 @@ def test_create_a_template_with_reply_to(admin_request, sample_user): template = Template.query.get(json_resp["data"]["id"]) from app.schemas import template_schema - assert sorted(json_resp["data"]) == sorted(template_schema.dump(template).data) + assert sorted(json_resp["data"]) == sorted(template_schema.dump(template)) th = TemplateHistory.query.filter_by(id=template.id, version=1).one() assert th.service_letter_contact_id == letter_contact.id @@ -1012,7 +1111,6 @@ def test_update_redact_template_400s_if_no_created_by(admin_request, sample_temp def test_preview_letter_template_by_id_invalid_file_type(sample_letter_notification, admin_request): - resp = admin_request.get( "template.preview_letter_template_by_notification_id", service_id=sample_letter_notification.service_id, @@ -1075,7 +1173,6 @@ def test_preview_letter_template_by_id_valid_file_type( def test_preview_letter_template_by_id_template_preview_500(notify_api, client, admin_request, sample_letter_notification): - with set_config_values( notify_api, { @@ -1109,7 +1206,6 @@ def test_preview_letter_template_by_id_template_preview_500(notify_api, client, def test_preview_letter_template_precompiled_pdf_file_type(notify_api, client, admin_request, sample_service, mocker): - template = create_template( sample_service, template_type="letter", @@ -1128,7 +1224,6 @@ def test_preview_letter_template_precompiled_pdf_file_type(notify_api, client, a }, ): with requests_mock.Mocker(): - content = b"\x00\x01" mock_get_letter_pdf = 
mocker.patch("app.template.rest.get_letter_pdf", return_value=content) @@ -1145,7 +1240,6 @@ def test_preview_letter_template_precompiled_pdf_file_type(notify_api, client, a def test_preview_letter_template_precompiled_s3_error(notify_api, client, admin_request, sample_service, mocker): - template = create_template( sample_service, template_type="letter", @@ -1164,7 +1258,6 @@ def test_preview_letter_template_precompiled_s3_error(notify_api, client, admin_ }, ): with requests_mock.Mocker(): - mocker.patch( "app.template.rest.get_letter_pdf", side_effect=botocore.exceptions.ClientError({"Error": {"Code": "403", "Message": "Unauthorized"}}, "GetObject"), @@ -1203,7 +1296,6 @@ def test_preview_letter_template_precompiled_png_file_type_or_pdf_with_overlay( post_url, overlay, ): - template = create_template( sample_service, template_type="letter", @@ -1222,7 +1314,6 @@ def test_preview_letter_template_precompiled_png_file_type_or_pdf_with_overlay( }, ): with requests_mock.Mocker() as request_mock: - pdf_content = b"\x00\x01" expected_returned_content = b"\x00\x02" @@ -1277,7 +1368,6 @@ def test_preview_letter_template_precompiled_png_file_type_hide_notify_tag_only_ page_number, expect_preview_url, ): - template = create_template( sample_service, template_type="letter", @@ -1317,7 +1407,6 @@ def test_preview_letter_template_precompiled_png_file_type_hide_notify_tag_only_ def test_preview_letter_template_precompiled_png_template_preview_500_error( notify_api, client, admin_request, sample_service, mocker ): - template = create_template( sample_service, template_type="letter", @@ -1336,7 +1425,6 @@ def test_preview_letter_template_precompiled_png_template_preview_500_error( }, ): with requests_mock.Mocker() as request_mock: - pdf_content = b"\x00\x01" png_content = b"\x00\x02" @@ -1366,7 +1454,6 @@ def test_preview_letter_template_precompiled_png_template_preview_500_error( def test_preview_letter_template_precompiled_png_template_preview_400_error( notify_api, client, admin_request, sample_service, mocker ): - template = create_template( sample_service, template_type="letter", @@ -1385,7 +1472,6 @@ def test_preview_letter_template_precompiled_png_template_preview_400_error( }, ): with requests_mock.Mocker() as request_mock: - pdf_content = b"\x00\x01" png_content = b"\x00\x02" @@ -1415,7 +1501,6 @@ def test_preview_letter_template_precompiled_png_template_preview_400_error( def test_preview_letter_template_precompiled_png_template_preview_pdf_error( notify_api, client, admin_request, sample_service, mocker ): - template = create_template( sample_service, template_type="letter", @@ -1434,7 +1519,6 @@ def test_preview_letter_template_precompiled_png_template_preview_pdf_error( }, ): with requests_mock.Mocker() as request_mock: - pdf_content = b"\x00\x01" png_content = b"\x00\x02" @@ -1466,3 +1550,78 @@ def test_preview_letter_template_precompiled_png_template_preview_pdf_error( ] == "Error extracting requested page from PDF file for notification_id {} type " "{} {}".format( notification.id, type(PdfReadError()), error_message ) + + +def test_should_template_be_redacted(): + some_org = create_organisation() + assert not should_template_be_redacted(some_org) + + dao_update_organisation(some_org.id, organisation_type="province_or_territory") + assert should_template_be_redacted(some_org) + + +def test_update_templates_category(sample_template, sample_template_category, admin_request): + admin_request.post( + "template.update_templates_category", + service_id=sample_template.service_id, + 
template_id=sample_template.id, + template_category_id=sample_template_category.id, + _expected_status=200, + ) + + template = dao_get_template_by_id(sample_template.id) + + assert template.template_category.id == sample_template_category.id + + +class TestTemplateCategory: + DEFAULT_TEMPLATE_CATEGORY_LOW = "0dda24c2-982a-4f44-9749-0e38b2607e89" + DEFAULT_TEMPLATE_CATEGORY_MEDIUM = "f75d6706-21b7-437e-b93a-2c0ab771e28e" + + # ensure that the process_type is overridden when a user changes categories + @pytest.mark.parametrize( + "template_category_id, expected_process_type", + [ + # category doesnt change, process_type should remain as priority + ( + "unchanged", + "priority", + ), + # category changes, process_type should be removed + ( + DEFAULT_TEMPLATE_CATEGORY_MEDIUM, + None, + ), + ], + ) + def test_process_type_should_be_reset_when_template_category_updated( + self, + sample_service, + sample_template_with_priority_override, + sample_user, + admin_request, + populate_generic_categories, + template_category_id, + expected_process_type, + notify_api, + ): + with set_config_values(notify_api, {"FF_TEMPLATE_CATEGORY": "true"}): # TODO remove statement when FF removed + template_orig = dao_get_template_by_id(sample_template_with_priority_override.id) + + calculated_tc = ( + template_category_id if template_category_id != "unchanged" else str(template_orig.template_category_id) + ) + admin_request.post( + "template.update_template", + service_id=sample_template_with_priority_override.service_id, + template_id=sample_template_with_priority_override.id, + _data={ + "template_category_id": calculated_tc, + "redact_personalisation": False, + }, + _expected_status=200, + ) + template = dao_get_template_by_id(sample_template_with_priority_override.id) + + assert str(template.template_category_id) == calculated_tc + assert template.process_type == expected_process_type diff --git a/tests/app/template/test_template_category_rest.py b/tests/app/template/test_template_category_rest.py new file mode 100644 index 0000000000..2669cc5978 --- /dev/null +++ b/tests/app/template/test_template_category_rest.py @@ -0,0 +1,149 @@ +import pytest +from flask import url_for + +from tests import create_authorization_header +from tests.app.conftest import create_sample_template + + +def test_should_create_new_template_category(client, notify_db, notify_db_session): + data = { + "name_en": "new english", + "name_fr": "new french", + "description_en": "new english description", + "description_fr": "new french description", + "sms_process_type": "bulk", + "email_process_type": "bulk", + "hidden": True, + } + auth_header = create_authorization_header() + + response = client.post( + url_for("template_category.create_template_category"), + headers=[("Content-Type", "application/json"), auth_header], + json=data, + ) + + assert response.status_code == 201 + assert response.json["template_category"]["name_en"] == "new english" + assert response.json["template_category"]["name_fr"] == "new french" + assert response.json["template_category"]["description_en"] == "new english description" + assert response.json["template_category"]["description_fr"] == "new french description" + assert response.json["template_category"]["sms_process_type"] == "bulk" + assert response.json["template_category"]["email_process_type"] == "bulk" + assert response.json["template_category"]["hidden"] + + +def test_get_template_category_by_id(client, sample_template_category): + auth_header = create_authorization_header() + response = client.get( + 
url_for("template_category.get_template_category", template_category_id=sample_template_category.id), + headers=[("Content-Type", "application/json"), auth_header], + ) + + assert response.status_code == 200 + assert response.json["template_category"]["name_en"] == sample_template_category.name_en + assert response.json["template_category"]["name_fr"] == sample_template_category.name_fr + assert response.json["template_category"]["description_en"] == sample_template_category.description_en + assert response.json["template_category"]["description_fr"] == sample_template_category.description_fr + assert response.json["template_category"]["sms_process_type"] == sample_template_category.sms_process_type + assert response.json["template_category"]["email_process_type"] == sample_template_category.email_process_type + assert response.json["template_category"]["hidden"] == sample_template_category.hidden + + +def test_get_template_category_by_template_id(client, notify_db, notify_db_session, sample_template_category): + category = sample_template_category + template = create_sample_template(notify_db, notify_db_session, template_category=category) + + auth_header = create_authorization_header() + endpoint = url_for("template_category.get_template_category_by_template_id", template_id=template.id) + + response = client.get( + endpoint, + headers=[("Content-Type", "application/json"), auth_header], + ) + + assert response.status_code == 200 + assert response.json["template_category"]["name_en"] == category.name_en + assert response.json["template_category"]["name_fr"] == category.name_fr + assert response.json["template_category"]["description_en"] == category.description_en + assert response.json["template_category"]["description_fr"] == category.description_fr + assert response.json["template_category"]["sms_process_type"] == category.sms_process_type + assert response.json["template_category"]["email_process_type"] == category.email_process_type + assert response.json["template_category"]["hidden"] == category.hidden + + +@pytest.mark.parametrize( + "template_type, hidden, expected_status_code, expected_msg", + [ + ("invalid_template_type", True, 400, "Invalid filter 'template_type', valid template_types: 'sms', 'email'"), + ("sms", "not_a_boolean", 200, None), + ("email", "True", 200, None), + ("email", "False", 200, None), + ("email", None, 200, None), + ("sms", "True", 200, None), + ("sms", "False", 200, None), + ("sms", None, 200, None), + (None, None, 200, None), + (None, "True", 200, None), + (None, "False", 200, None), + ], +) +def test_get_template_categories( + template_type, + hidden, + expected_status_code, + expected_msg, + sample_template_category, + client, + notify_db, + notify_db_session, + mocker, +): + auth_header = create_authorization_header() + + endpoint = url_for("template_category.get_template_categories", template_type=template_type, hidden=hidden) + + mocker.patch("app.dao.template_categories_dao.dao_get_all_template_categories", return_value=[sample_template_category]) + + response = client.get( + endpoint, + headers=[("Content-Type", "application/json"), auth_header], + ) + + assert response.status_code == expected_status_code + if not expected_status_code == 200: + assert response.json["message"] == expected_msg + + +@pytest.mark.parametrize( + "cascade, expected_status_code, expected_msg", + [ + ("True", 204, ""), + ("False", 400, "Cannot delete categories associated with templates. 
Dissociate the category from templates first."), + ], +) +def test_delete_template_category_cascade( + cascade, + expected_status_code, + expected_msg, + client, + mocker, + sample_template_category_with_templates, + populate_generic_categories, +): + auth_header = create_authorization_header() + + endpoint = url_for( + "template_category.delete_template_category", + template_category_id=sample_template_category_with_templates.id, + cascade=cascade, + ) + + response = client.delete( + endpoint, + headers=[("Content-Type", "application/json"), auth_header], + ) + + assert response.status_code == expected_status_code + if expected_status_code == 400: + assert response.json["message"] == expected_msg diff --git a/tests/app/template_statistics/test_rest.py b/tests/app/template_statistics/test_rest.py index 13c902d067..88080583ca 100644 --- a/tests/app/template_statistics/test_rest.py +++ b/tests/app/template_statistics/test_rest.py @@ -12,7 +12,7 @@ def set_up_get_all_from_hash(mock_redis, side_effect): redis returns binary strings for both keys and values - so given a list of side effects (return values), make sure """ - assert type(side_effect) == list + assert isinstance(side_effect, list) side_effects = [] for ret_val in side_effect: if ret_val is None: @@ -78,7 +78,6 @@ def test_get_template_statistics_for_service_by_day_accepts_old_query_string( @freeze_time("2018-01-02 12:00:00") def test_get_template_statistics_for_service_by_day_goes_to_db(admin_request, mocker, sample_template): - # first time it is called redis returns data, second time returns none mock_dao = mocker.patch( "app.template_statistics.rest.fetch_notification_status_for_service_for_today_and_7_previous_days", @@ -114,7 +113,6 @@ def test_get_template_statistics_for_service_by_day_goes_to_db(admin_request, mo def test_get_template_statistics_for_service_by_day_returns_empty_list_if_no_templates(admin_request, mocker, sample_service): - json_resp = admin_request.get( "template_statistics.get_template_statistics_for_service_by_day", service_id=sample_service.id, diff --git a/tests/app/test_cloudfoundry_config.py b/tests/app/test_cloudfoundry_config.py deleted file mode 100644 index 3fba45f7ef..0000000000 --- a/tests/app/test_cloudfoundry_config.py +++ /dev/null @@ -1,42 +0,0 @@ -import json -import os - -import pytest - -from app.cloudfoundry_config import extract_cloudfoundry_config, set_config_env_vars - - -@pytest.fixture -def postgres_config(): - return [{"credentials": {"uri": "postgres uri"}}] - - -@pytest.fixture -def cloudfoundry_config(postgres_config): - return {"postgres": postgres_config, "user-provided": []} - - -@pytest.fixture -def cloudfoundry_environ(monkeypatch, cloudfoundry_config): - monkeypatch.setenv("VCAP_SERVICES", json.dumps(cloudfoundry_config)) - monkeypatch.setenv("VCAP_APPLICATION", '{"space_name": "🚀🌌"}') - - -@pytest.mark.usefixtures("os_environ", "cloudfoundry_environ") -def test_extract_cloudfoundry_config_populates_other_vars(): - extract_cloudfoundry_config() - - assert os.environ["SQLALCHEMY_DATABASE_URI"] == "postgres uri" - assert os.environ["NOTIFY_ENVIRONMENT"] == "🚀🌌" - assert os.environ["NOTIFY_LOG_PATH"] == "/home/vcap/logs/app.log" - - -@pytest.mark.usefixtures("os_environ", "cloudfoundry_environ") -def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config): - cloudfoundry_config["foo"] = {"credentials": {"foo": "foo"}} - cloudfoundry_config["user-provided"].append({"name": "bar", "credentials": {"bar": "bar"}}) - - set_config_env_vars(cloudfoundry_config) - - 
assert "foo" not in os.environ - assert "bar" not in os.environ diff --git a/tests/app/test_config.py b/tests/app/test_config.py index c291c989f2..e27ba129d1 100644 --- a/tests/app/test_config.py +++ b/tests/app/test_config.py @@ -1,6 +1,5 @@ import importlib import os -from unittest import mock import pytest @@ -25,60 +24,51 @@ def reload_config(): importlib.reload(config) -def test_load_cloudfoundry_config_if_available(monkeypatch, reload_config): - os.environ["ADMIN_BASE_URL"] = "env" - monkeypatch.setenv("VCAP_SERVICES", "some json blob") - monkeypatch.setenv("VCAP_APPLICATION", "some json blob") - - with mock.patch("app.cloudfoundry_config.extract_cloudfoundry_config", side_effect=cf_conf) as cf_config: - # reload config so that its module level code (ie: all of it) is re-instantiated - importlib.reload(config) - - assert cf_config.called - - assert os.environ["ADMIN_BASE_URL"] == "cf" - assert config.Config.ADMIN_BASE_URL == "cf" - - -def test_load_config_if_cloudfoundry_not_available(monkeypatch, reload_config): - os.environ["ADMIN_BASE_URL"] = "env" - - monkeypatch.delenv("VCAP_SERVICES", raising=False) - - with mock.patch("app.cloudfoundry_config.extract_cloudfoundry_config") as cf_config: - # reload config so that its module level code (ie: all of it) is re-instantiated - importlib.reload(config) - - assert not cf_config.called - - assert os.environ["ADMIN_BASE_URL"] == "env" - assert config.Config.ADMIN_BASE_URL == "env" - - def test_queue_names_all_queues_correct(): # Need to ensure that all_queues() only returns queue names used in API queues = QueueNames.all_queues() - assert len(queues) == 14 - assert ( - set( - [ - QueueNames.PRIORITY, - QueueNames.BULK, - QueueNames.PERIODIC, - QueueNames.DATABASE, - QueueNames.SEND_SMS, - QueueNames.SEND_THROTTLED_SMS, - QueueNames.SEND_EMAIL, - QueueNames.RESEARCH_MODE, - QueueNames.REPORTING, - QueueNames.JOBS, - QueueNames.RETRY, - QueueNames.NOTIFY, - # QueueNames.CREATE_LETTERS_PDF, - QueueNames.CALLBACKS, - # QueueNames.LETTERS, - QueueNames.DELIVERY_RECEIPTS, - ] - ) - == set(queues) - ) + assert len(queues) == 22 + assert set( + [ + QueueNames.PRIORITY, + QueueNames.BULK, + QueueNames.PERIODIC, + QueueNames.DATABASE, + QueueNames.PRIORITY_DATABASE, + QueueNames.NORMAL_DATABASE, + QueueNames.BULK_DATABASE, + QueueNames.SEND_SMS_HIGH, + QueueNames.SEND_SMS_MEDIUM, + QueueNames.SEND_SMS_LOW, + QueueNames.SEND_THROTTLED_SMS, + QueueNames.SEND_EMAIL_HIGH, + QueueNames.SEND_EMAIL_MEDIUM, + QueueNames.SEND_EMAIL_LOW, + QueueNames.RESEARCH_MODE, + QueueNames.REPORTING, + QueueNames.JOBS, + QueueNames.RETRY, + QueueNames.CALLBACKS_RETRY, + QueueNames.NOTIFY, + # QueueNames.CREATE_LETTERS_PDF, + QueueNames.CALLBACKS, + # QueueNames.LETTERS, + QueueNames.DELIVERY_RECEIPTS, + ] + ) == set(queues) + + +def test_get_safe_config(mocker, reload_config): + mock_get_class_attrs = mocker.patch("notifications_utils.logging.get_class_attrs") + mock_get_sensitive_config = mocker.patch("app.config.Config.get_sensitive_config") + + config.Config.get_safe_config() + assert mock_get_class_attrs.called + assert mock_get_sensitive_config.called + + +def test_get_sensitive_config(): + sensitive_config = config.Config.get_sensitive_config() + assert sensitive_config + for key in sensitive_config: + assert key diff --git a/tests/app/test_email_limit_utils.py b/tests/app/test_email_limit_utils.py new file mode 100644 index 0000000000..f30ee8879a --- /dev/null +++ b/tests/app/test_email_limit_utils.py @@ -0,0 +1,46 @@ +import pytest +from 
notifications_utils.clients.redis import email_daily_count_cache_key + +from app.email_limit_utils import fetch_todays_email_count, increment_todays_email_count +from tests.conftest import set_config + + +class TestEmailLimits: + @pytest.mark.parametrize("redis_value, db_value, expected_result", [(None, 5, 5), ("3", 5, 3)]) + def test_fetch_todays_requested_email_count(self, client, mocker, sample_service, redis_value, db_value, expected_result): + cache_key = email_daily_count_cache_key(sample_service.id) + mocker.patch("app.redis_store.get", lambda x: redis_value if x == cache_key else None) + mocked_set = mocker.patch("app.redis_store.set") + mocker.patch("app.email_limit_utils.fetch_todays_total_email_count", return_value=db_value) + # mocker.patch("app.dao.users_dao.user_can_be_archived", return_value=False) + + with set_config(client.application, "REDIS_ENABLED", True): + actual_result = fetch_todays_email_count(sample_service.id) + + assert actual_result == expected_result + if redis_value is None: + assert mocked_set.called_once_with( + cache_key, + db_value, + ) + else: + mocked_set.assert_not_called() + + @pytest.mark.parametrize("redis_value, db_value, increment_by", [(None, 5, 5), ("3", 5, 3)]) + def test_increment_todays_requested_email_count(self, mocker, sample_service, redis_value, db_value, increment_by): + cache_key = email_daily_count_cache_key(sample_service.id) + mocker.patch("app.redis_store.get", lambda x: redis_value if x == cache_key else None) + mocked_set = mocker.patch("app.redis_store.set") + mocked_incrby = mocker.patch("app.redis_store.incrby") + mocker.patch("app.email_limit_utils.fetch_todays_email_count", return_value=db_value) + + increment_todays_email_count(sample_service.id, increment_by) + + assert mocked_incrby.called_once_with(cache_key, increment_by) + if redis_value is None: + assert mocked_set.called_once_with( + cache_key, + db_value, + ) + else: + mocked_set.assert_not_called() diff --git a/tests/app/test_encryption.py b/tests/app/test_encryption.py index a985e04ea8..7115588e44 100644 --- a/tests/app/test_encryption.py +++ b/tests/app/test_encryption.py @@ -1,20 +1,57 @@ -from app.encryption import Encryption +import pytest +from itsdangerous import BadSignature -encryption = Encryption() +from app.encryption import CryptoSigner -def test_should_encrypt_content(notify_api): - encryption.init_app(notify_api) - assert encryption.encrypt("this") != "this" +class TestEncryption: + def test_sign_and_verify(self, notify_api): + signer = CryptoSigner() + signer.init_app(notify_api, "secret", "salt") + signed = signer.sign("this") + assert signed != "this" + assert signer.verify(signed) == "this" + def test_should_not_verify_content_signed_with_different_secrets(self, notify_api): + signer1 = CryptoSigner() + signer2 = CryptoSigner() + signer1.init_app(notify_api, "secret1", "salt") + signer2.init_app(notify_api, "secret2", "salt") + with pytest.raises(BadSignature): + signer2.verify(signer1.sign("this")) -def test_should_decrypt_content(notify_api): - encryption.init_app(notify_api) - encrypted = encryption.encrypt("this") - assert encryption.decrypt(encrypted) == "this" + def test_should_not_verify_content_signed_with_different_salts(self, notify_api): + signer1 = CryptoSigner() + signer2 = CryptoSigner() + signer1.init_app(notify_api, "secret", "salt1") + signer2.init_app(notify_api, "secret", "salt2") + with pytest.raises(BadSignature): + signer2.verify(signer1.sign("this")) + def test_should_sign_dicts(self, notify_api): + signer = CryptoSigner() 
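A sketch of the key-rotation flow the new TestEncryption cases imply may help when reading them; it is not the shipped helper, only an illustration built from the calls the tests themselves make. Here "app" stands for the Flask application (the notify_api fixture in the tests), and the secret/salt strings are placeholders.

    from app.encryption import CryptoSigner

    def rotation_example(app):
        # Pre-rotation: a signer configured with only the old secret.
        old_signer = CryptoSigner()
        old_signer.init_app(app, ["old-secret"], "salt")
        token = old_signer.sign({"personalisation": {"name": "Ada"}})

        # Post-rotation: keep the old secret alongside the new one so values
        # signed before the rotation still verify, as the tests assert.
        new_signer = CryptoSigner()
        new_signer.init_app(app, ["old-secret", "new-secret"], "salt")
        return new_signer.verify(token)  # still decodes to the original dict
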
+ signer.init_app(notify_api, "secret", "salt") + assert signer.verify(signer.sign({"this": "that"})) == {"this": "that"} -def test_should_encrypt_json(notify_api): - encryption.init_app(notify_api) - encrypted = encryption.encrypt({"this": "that"}) - assert encryption.decrypt(encrypted) == {"this": "that"} + def test_should_verify_content_signed_with_an_old_secret(self, notify_api): + signer1 = CryptoSigner() + signer2 = CryptoSigner() + signer1.init_app(notify_api, ["s1", "s2"], "salt") + signer2.init_app(notify_api, ["s2", "s3"], "salt") + assert signer2.verify(signer1.sign("this")) == "this" + + def test_should_unsafe_verify_content_signed_with_different_secrets(self, notify_api): + signer1 = CryptoSigner() + signer2 = CryptoSigner() + signer1.init_app(notify_api, "secret1", "salt") + signer2.init_app(notify_api, "secret2", "salt") + assert signer2.verify_unsafe(signer1.sign("this")) == "this" + + def test_sign_with_all_keys(self, notify_api): + signer1 = CryptoSigner() + signer1.init_app(notify_api, "s1", "salt") + signer2 = CryptoSigner() + signer2.init_app(notify_api, "s2", "salt") + signer12 = CryptoSigner() + signer12.init_app(notify_api, ["s1", "s2"], "salt") + assert signer12.sign_with_all_keys("this") == [signer2.sign("this"), signer1.sign("this")] diff --git a/tests/app/test_json_provider.py b/tests/app/test_json_provider.py new file mode 100644 index 0000000000..15dc693d40 --- /dev/null +++ b/tests/app/test_json_provider.py @@ -0,0 +1,17 @@ +import pytest +from sqlalchemy.engine.row import Row + +from app.json_provider import NotifyJSONProvider + + +class TestNotifyJSONProvider: + @pytest.fixture() + def row(self, mocker): + row = mocker.patch("sqlalchemy.engine.row.Row", spec=Row) + row._asdict.return_value = {"key1": "value1", "key2": "value2"} + return row + + def test_serialization_row(self, notify_api, row): + jp = NotifyJSONProvider(notify_api) + serialized: str = jp.dumps(row) # type: ignore + assert '{"key1": "value1", "key2": "value2"}' in serialized diff --git a/tests/app/test_model.py b/tests/app/test_model.py index 931a45c928..5e3df7387f 100644 --- a/tests/app/test_model.py +++ b/tests/app/test_model.py @@ -2,10 +2,12 @@ from freezegun import freeze_time from sqlalchemy.exc import IntegrityError -from app import encryption +from app import signer_personalisation from app.models import ( + BULK, EMAIL_TYPE, MOBILE_TYPE, + NORMAL, NOTIFICATION_CREATED, NOTIFICATION_DELIVERED, NOTIFICATION_FAILED, @@ -16,10 +18,12 @@ NOTIFICATION_STATUS_TYPES_FAILED, NOTIFICATION_TECHNICAL_FAILURE, PRECOMPILED_TEMPLATE_NAME, + PRIORITY, SMS_TYPE, Notification, ServiceSafelist, ) +from tests.app.conftest import create_template_category from tests.app.db import ( create_inbound_number, create_letter_contact, @@ -127,24 +131,30 @@ def test_notification_for_csv_returns_correct_job_row_number(sample_job): @freeze_time("2016-01-30 12:39:58.321312") @pytest.mark.parametrize( - "template_type, status, expected_status", + "template_type, status, feedback_subtype, expected_status", [ - ("email", "failed", "Failed"), - ("email", "technical-failure", "Technical failure"), - ("email", "temporary-failure", "Inbox not accepting messages right now"), - ("email", "permanent-failure", "Email address doesn’t exist"), - ("sms", "temporary-failure", "Phone not accepting messages right now"), - ("sms", "permanent-failure", "Phone number doesn’t exist"), - ("sms", "sent", "Sent"), - ("letter", "created", "Accepted"), - ("letter", "sending", "Accepted"), - ("letter", "technical-failure", "Technical 
failure"), - ("letter", "delivered", "Received"), + ("email", "failed", None, "Failed"), + ("email", "technical-failure", None, "Tech issue"), + ("email", "temporary-failure", None, "Content or inbox issue"), + ("email", "permanent-failure", None, "No such address"), + ("email", "permanent-failure", "suppressed", "Blocked"), + ("email", "permanent-failure", "on-account-suppression-list", "Blocked"), + ("sms", "temporary-failure", None, "Carrier issue"), + ("sms", "permanent-failure", None, "No such number"), + ("sms", "sent", None, "Sent"), + ("letter", "created", None, "Accepted"), + ("letter", "sending", None, "Accepted"), + ("letter", "technical-failure", None, "Technical failure"), + ("letter", "delivered", None, "Received"), ], ) -def test_notification_for_csv_returns_formatted_status(sample_service, template_type, status, expected_status): +def test_notification_for_csv_returns_formatted_status_ff_bouncerate( + sample_service, template_type, status, feedback_subtype, expected_status +): template = create_template(sample_service, template_type=template_type) notification = save_notification(create_notification(template, status=status)) + if feedback_subtype: + notification.feedback_subtype = feedback_subtype serialized = notification.serialize_for_csv() assert serialized["status"] == expected_status @@ -166,7 +176,7 @@ def test_notification_personalisation_getter_returns_empty_dict_from_None(): def test_notification_personalisation_getter_always_returns_empty_dict(): noti = Notification() - noti._personalisation = encryption.encrypt({}) + noti._personalisation = signer_personalisation.sign({}) assert noti.personalisation == {} @@ -175,7 +185,7 @@ def test_notification_personalisation_setter_always_sets_empty_dict(input_value) noti = Notification() noti.personalisation = input_value - assert noti._personalisation == encryption.encrypt({}) + assert noti._personalisation == signer_personalisation.sign({}) def test_notification_subject_is_none_for_sms(): @@ -334,19 +344,6 @@ def test_is_precompiled_letter_name_correct_not_hidden(sample_letter_template): assert not sample_letter_template.is_precompiled_letter -@pytest.mark.parametrize( - "process_type, expected_queue", - [ - ("normal", None), - ("priority", "priority-tasks"), - ("bulk", "bulk-tasks"), - ], -) -def test_template_queue_to_use(sample_service, process_type, expected_queue): - template = create_template(sample_service, process_type=process_type) - assert template.queue_to_use() == expected_queue - - def test_template_folder_is_parent(sample_service): x = None folders = [] @@ -361,6 +358,34 @@ def test_template_folder_is_parent(sample_service): assert not folders[1].is_parent_of(folders[0]) +@pytest.mark.parametrize( + "template_type, process_type, sms_process_type, email_process_type, expected_template_process_type", + [ + (SMS_TYPE, None, NORMAL, BULK, NORMAL), + (EMAIL_TYPE, None, BULK, NORMAL, NORMAL), + (SMS_TYPE, BULK, PRIORITY, PRIORITY, BULK), + (EMAIL_TYPE, BULK, PRIORITY, PRIORITY, BULK), + ], +) +def test_template_process_type( + notify_db, + notify_db_session, + template_type, + process_type, + sms_process_type, + email_process_type, + expected_template_process_type, +): + template_category = create_template_category( + notify_db, notify_db_session, sms_process_type=sms_process_type, email_process_type=email_process_type + ) + template = create_template( + service=create_service(), template_type=template_type, process_type=process_type, template_category=template_category + ) + + assert template.template_process_type 
== expected_template_process_type + + def test_fido2_key_serialization(sample_fido2_key): json = sample_fido2_key.serialize() assert json["name"] == sample_fido2_key.name @@ -371,3 +396,10 @@ def test_login_event_serialization(sample_login_event): json = sample_login_event.serialize() assert json["data"] == sample_login_event.data assert json["created_at"] + + +class TestNotificationModel: + def test_queue_name_in_notifications(self, sample_service): + template = create_template(sample_service, template_type="email") + notification = save_notification(create_notification(template, to_field="test@example.com", queue_name="tester")) + assert notification.queue_name == "tester" diff --git a/tests/app/test_queue.py b/tests/app/test_queue.py new file mode 100644 index 0000000000..f3581c8651 --- /dev/null +++ b/tests/app/test_queue.py @@ -0,0 +1,442 @@ +import time +from contextlib import contextmanager +from os import getenv +from unittest import mock +from urllib.parse import urlparse +from uuid import uuid4 + +import pytest +from flask import Flask +from pytest_mock_resources import RedisConfig, create_redis_fixture + +from app import create_app, flask_redis, metrics_logger +from app.config import Config, Test +from app.queue import Buffer, MockQueue, RedisQueue, generate_element + + +@pytest.fixture(scope="session") +def pmr_redis_config(): + parsed_uri = urlparse(getenv("REDIS_URL")) + return RedisConfig(image="redis:6.2", host=parsed_uri.hostname, port="6380", ci_port="6380") + + +redis = create_redis_fixture(scope="function") +REDIS_ELEMENTS_COUNT = 123 + +QNAME_SUFFIX = "qsuffix" +PROCESS_TYPE = "process_type" + + +class TestBuffer: + def test_when_name_suffix_is_not_supplied(self): + assert Buffer.INBOX.inbox_name() == "inbox" + assert Buffer.IN_FLIGHT.inbox_name() == "in-flight" + + def test_when_name_suffix_is_supplied(self): + assert Buffer.INBOX.inbox_name("test") == "inbox:test" + assert Buffer.IN_FLIGHT.inbox_name("test") == "in-flight:test" + + def test_when_name_suffix_process_is_supplied(self): + assert Buffer.INBOX.inbox_name("test", "normal") == "inbox:test:normal" + assert Buffer.IN_FLIGHT.inbox_name("test", "normal") == "in-flight:test:normal" + + def test_when_get_inflight_name_suffix_is_not_supplied(self): + receipt = uuid4() + assert Buffer.INBOX.inflight_name(receipt=receipt) == f"in-flight:{receipt}" + assert Buffer.IN_FLIGHT.inflight_name(receipt=receipt) == f"in-flight:{receipt}" + + def test_when_get_inflight_name_suffix_is_supplied(self): + receipt = uuid4() + assert Buffer.INBOX.inflight_name(receipt=receipt, suffix="test") == f"in-flight:test:{receipt}" + assert Buffer.IN_FLIGHT.inflight_name(receipt=receipt, suffix="test") == f"in-flight:test:{receipt}" + + def test_when_get_inflight_name_suffix_process_type_is_supplied(self): + receipt = uuid4() + assert ( + Buffer.INBOX.inflight_name(receipt=receipt, suffix="test", process_type="normal") + == f"in-flight:test:normal:{receipt}" + ) + assert ( + Buffer.IN_FLIGHT.inflight_name(receipt=receipt, suffix="test", process_type="normal") + == f"in-flight:test:normal:{receipt}" + ) + + +class TestRedisQueue: + @pytest.fixture(autouse=True) + def app(self): + config: Config = Test() # type: ignore + config.REDIS_ENABLED = True + app = Flask(config.NOTIFY_ENVIRONMENT) + create_app(app, config) + ctx = app.app_context() + ctx.push() + with app.test_request_context(): + yield app + ctx.pop() + return app + + @pytest.fixture() + def redis_queue(self, app): + q = RedisQueue(QNAME_SUFFIX, expire_inflight_after_seconds=1) + 
q.init_app(flask_redis, metrics_logger) + return q + + @pytest.fixture() + def redis_queue_with_process(self, app): + q = RedisQueue(QNAME_SUFFIX, expire_inflight_after_seconds=1, process_type=PROCESS_TYPE) + q.init_app(flask_redis, metrics_logger) + return q + + @contextmanager + def given_inbox_with_one_element(self, redis, redis_queue): + self.delete_all_list(redis) + notification = generate_element() + try: + redis_queue.publish(notification) + yield + finally: + self.delete_all_list(redis) + + @contextmanager + def given_inbox_with_one_element_process_type(self, redis, redis_queue_with_process): + self.delete_all_list(redis) + notification = generate_element() + try: + redis_queue_with_process.publish(notification) + yield + finally: + self.delete_all_list(redis) + + @contextmanager + def given_inbox_with_many_indexes(self, redis, redis_queue): + self.delete_all_list(redis) + try: + indexes = [str(i) for i in range(0, REDIS_ELEMENTS_COUNT)] + [redis_queue.publish(index) for index in indexes] + yield + finally: + self.delete_all_list(redis) + + @contextmanager + def given_inbox_with_many_indexes_process_type(self, redis, redis_queue_with_process): + self.delete_all_list(redis) + try: + indexes = [str(i) for i in range(0, REDIS_ELEMENTS_COUNT)] + [redis_queue_with_process.publish(index) for index in indexes] + yield + finally: + self.delete_all_list(redis) + + @pytest.mark.serial + def delete_all_list(self, redis): + self.delete_all_inbox(redis) + self.delete_all_inflight(redis) + + @pytest.mark.serial + def delete_all_inbox(self, redis): + for key in redis.scan_iter(f"{Buffer.INBOX.value}*"): + redis.delete(key) + + @pytest.mark.serial + def delete_all_inflight(self, redis): + for key in redis.scan_iter(f"{Buffer.IN_FLIGHT.value}*"): + redis.delete(key) + + @pytest.mark.serial + def test_put_messages(self, redis, redis_queue): + element = generate_element() + redis_queue.publish(element) + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) == 1 + self.delete_all_list(redis) + + @pytest.mark.serial + def test_put_messages_with_process(self, redis, redis_queue_with_process): + element = generate_element() + redis_queue_with_process.publish(element) + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX, process_type=PROCESS_TYPE)) == 1 + self.delete_all_list(redis) + + @pytest.mark.serial + def test_polling_message(self, redis, redis_queue): + with self.given_inbox_with_one_element(redis, redis_queue): + (receipt, elements) = redis_queue.poll(10) + assert len(elements) == 1 + assert isinstance(elements[0], str) + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) == 0 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX)) == 1 + self.delete_all_list(redis) + + @pytest.mark.serial + def test_polling_message_with_process(self, redis, redis_queue_with_process): + with self.given_inbox_with_one_element_process_type(redis, redis_queue_with_process): + (receipt, elements) = redis_queue_with_process.poll(10) + assert len(elements) == 1 + assert isinstance(elements[0], str) + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX, process_type=PROCESS_TYPE)) == 0 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX, process_type=PROCESS_TYPE)) == 1 + self.delete_all_list(redis) + + @pytest.mark.serial + @pytest.mark.parametrize("count", [0, 1, 98, 99, 100, 101, REDIS_ELEMENTS_COUNT, REDIS_ELEMENTS_COUNT + 1, 500]) + def test_polling_many_messages(self, redis, redis_queue, count): + with self.given_inbox_with_many_indexes(redis, redis_queue): 
+ real_count = count if count < REDIS_ELEMENTS_COUNT else REDIS_ELEMENTS_COUNT + (receipt, elements) = redis_queue.poll(count) + assert len(elements) == real_count + if count < REDIS_ELEMENTS_COUNT: + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) > 0 + else: + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) == 0 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX)) == real_count + self.delete_all_list(redis) + + @pytest.mark.serial + @pytest.mark.parametrize("count", [0, 1, 98, 99, 100, 101, REDIS_ELEMENTS_COUNT, REDIS_ELEMENTS_COUNT + 1, 500]) + def test_polling_many_messages_with_process(self, redis, redis_queue_with_process, count): + with self.given_inbox_with_many_indexes(redis, redis_queue_with_process): + real_count = count if count < REDIS_ELEMENTS_COUNT else REDIS_ELEMENTS_COUNT + (receipt, elements) = redis_queue_with_process.poll(count) + assert len(elements) == real_count + if count < REDIS_ELEMENTS_COUNT: + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX, process_type=PROCESS_TYPE)) > 0 + else: + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX, process_type=PROCESS_TYPE)) == 0 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX, process_type=PROCESS_TYPE)) == real_count + self.delete_all_list(redis) + + @pytest.mark.serial + @pytest.mark.parametrize("suffix", ["smss", "emails", "🎅", "", None]) + def test_polling_message_with_custom_inbox_name(self, redis, suffix): + self.delete_all_list(redis) + try: + redis_queue = RedisQueue(suffix) + redis_queue.init_app(flask_redis, metrics_logger) + element = generate_element() + redis_queue.publish(element) + assert redis.llen(Buffer.INBOX.inbox_name(suffix)) == 1 + + (receipt, elements) = redis_queue.poll(10) + assert len(elements) == 1 + assert redis.llen(Buffer.INBOX.inbox_name(suffix)) == 0 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, suffix)) == 1 + + redis_queue.acknowledge(receipt) + assert redis.llen(Buffer.INBOX.inbox_name(suffix)) == 0 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, suffix)) == 0 + finally: + self.delete_all_list(redis) + + @pytest.mark.serial + def test_polling_with_empty_inbox(self, redis, redis_queue): + self.delete_all_list(redis) + (receipt, elements) = redis_queue.poll(10) + assert len(elements) == 0 + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) == 0 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX)) == 0 + + @pytest.mark.serial + def test_polling_with_zero_count(self, redis, redis_queue): + with self.given_inbox_with_one_element(redis, redis_queue): + (receipt, elements) = redis_queue.poll(0) + assert len(elements) == 0 + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) == 1 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX)) == 0 + + @pytest.mark.serial + def test_polling_with_negative_count(self, redis, redis_queue): + with self.given_inbox_with_one_element(redis, redis_queue): + (receipt, elements) = redis_queue.poll(-1) + assert len(elements) == 0 + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) == 1 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX)) == 0 + + @pytest.mark.serial + def test_acknowledged_messages(self, redis, redis_queue): + with self.given_inbox_with_one_element(redis, redis_queue): + (receipt, elements) = redis_queue.poll(10) + assert redis_queue.acknowledge(receipt) + assert len(elements) > 0 + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) == 0 + assert 
redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX)) == 0 + assert len(redis.keys("*")) == 0 + + @pytest.mark.serial + def test_acknowledge_invalid_inflight(self, redis, redis_queue): + with self.given_inbox_with_one_element(redis, redis_queue): + (receipt, elements) = redis_queue.poll(10) + assert not redis_queue.acknowledge("11111111-1111-1111-1111-1111") + assert len(elements) > 0 + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) == 0 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX)) == 1 + assert len(redis.keys("*")) == 1 + + @pytest.mark.serial + def test_expire_inflights(self, redis, redis_queue): + with self.given_inbox_with_many_indexes(redis, redis_queue): + inbox_name = Buffer.INBOX.inbox_name(QNAME_SUFFIX) + expected_inbox_contents = redis.lrange(inbox_name, 0, REDIS_ELEMENTS_COUNT) + redis.set("not_inflight", "test") + (receipt1, _) = redis_queue.poll(10) + redis_queue.poll(10) + redis_queue.poll(10) + time.sleep(2) + redis_queue.expire_inflights() + + assert redis.llen(inbox_name) == REDIS_ELEMENTS_COUNT + actual_inbox_contents = redis.lrange(inbox_name, 0, REDIS_ELEMENTS_COUNT) + assert sorted(expected_inbox_contents) == sorted(actual_inbox_contents) + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt1, QNAME_SUFFIX)) == 0 + assert redis.get("not_inflight") == b"test" + + @pytest.mark.serial + def test_expire_inflights_does_not_expire_early(self, redis, redis_queue): + with self.given_inbox_with_many_indexes(redis, redis_queue): + (receipt, _) = redis_queue.poll(10) + redis_queue.expire_inflights() + assert redis.llen(Buffer.INBOX.inbox_name(QNAME_SUFFIX)) == REDIS_ELEMENTS_COUNT - 10 + assert redis.llen(Buffer.IN_FLIGHT.inflight_name(receipt, QNAME_SUFFIX)) == 10 + + @pytest.mark.serial + def test_messages_serialization_after_poll(self, redis, redis_queue): + self.delete_all_list(redis) + notification = ( + "{'id': '0ba0ff51-ec82-4835-b828-a24fec6124ab', 'type': 'email', 'email_address': 'success@simulator.amazonses.com'}" + ) + redis_queue.publish(notification) + (_, elements) = redis_queue.poll(1) + + assert len(elements) > 0 + assert type(elements) is list + assert type(elements[0]) is str + assert elements[0] == notification + + self.delete_all_list(redis) + + +@pytest.mark.usefixtures("notify_api") +class TestMockQueue: + @pytest.fixture + def mock_queue(self): + return MockQueue() + + def test_polling_messages_from_queue(self, mock_queue): + (receipt, elements) = mock_queue.poll(10) + assert elements is not None + assert len(elements) == 10 + + def test_publish_mesages_on_queue(self, mock_queue): + element = generate_element() + mock_queue.publish(element) + + # This should not add change internal data structure + # or differ from a random output generation due to the + # nature of MockQueue. 
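Taken together, the RedisQueue tests describe a poll / process / acknowledge contract: poll(n) moves up to n items from the inbox list into a per-receipt in-flight list, acknowledge(receipt) drops that in-flight list once the batch is handled, and expire_inflights() returns abandoned in-flight items to the inbox after the configured timeout. A consumer sketch under those assumptions (handle_element is a hypothetical placeholder for real processing, not a function in this codebase):

    def drain_once(redis_queue, batch_size=10):
        receipt, elements = redis_queue.poll(batch_size)
        if not elements:
            return 0
        for element in elements:
            handle_element(element)        # hypothetical processing step
        redis_queue.acknowledge(receipt)   # clears the in-flight list for this receipt
        return len(elements)

    # A periodic task would call redis_queue.expire_inflights() so that batches
    # polled by a crashed worker are eventually pushed back onto the inbox.
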
+ (_, elements) = mock_queue.poll(1) + assert elements is not None + assert len(elements) == 1 + assert element != elements[0] + + def test_acknowledged_messages(self, mock_queue): + mock_queue.acknowledge([1, 2, 3]) + + +class TestRedisQueueMetricUsage: + @pytest.fixture(autouse=True) + def app(self): + config: Config = Test() # type: ignore + config.REDIS_ENABLED = True + app = Flask(config.NOTIFY_ENVIRONMENT) + create_app(app, config) + ctx = app.app_context() + ctx.push() + with app.test_request_context(): + yield app + ctx.pop() + return app + + @pytest.fixture() + def redis_queue(self, app): + q = RedisQueue(QNAME_SUFFIX, expire_inflight_after_seconds=1) + q.init_app(flask_redis, metrics_logger) + return q + + @contextmanager + def given_inbox_with_one_element(self, redis, redis_queue): + self.delete_all_list(redis) + notification = generate_element() + try: + redis_queue.publish(notification) + yield + finally: + self.delete_all_list(redis) + + @contextmanager + def given_inbox_with_many_indexes(self, redis, redis_queue): + self.delete_all_list(redis) + try: + indexes = [str(i) for i in range(0, REDIS_ELEMENTS_COUNT)] + [redis_queue.publish(index) for index in indexes] + yield + finally: + self.delete_all_list(redis) + + @pytest.mark.serial + def delete_all_list(self, redis): + self.delete_all_inbox(redis) + self.delete_all_inflight(redis) + + @pytest.mark.serial + def delete_all_inbox(self, redis): + for key in redis.scan_iter(f"{Buffer.INBOX.value}*"): + redis.delete(key) + + @pytest.mark.serial + def delete_all_inflight(self, redis): + for key in redis.scan_iter(f"{Buffer.IN_FLIGHT.value}*"): + redis.delete(key) + + @pytest.mark.serial + def test_put_batch_saving_metric(self, redis, redis_queue, mocker): + pbsm_mock = mocker.patch("app.queue.put_batch_saving_metric") + element = generate_element() + redis_queue.publish(element) + assert pbsm_mock.assert_called_with(mock.ANY, mock.ANY, 1) is None + self.delete_all_list(redis) + + @pytest.mark.serial + def test_polling_metric(self, redis, redis_queue, mocker): + with self.given_inbox_with_one_element(redis, redis_queue): + pbsim_mock = mocker.patch("app.queue.put_batch_saving_inflight_metric") + redis_queue.poll(10) + assert pbsim_mock.assert_called_with(mock.ANY, mock.ANY, 1) is None + + @pytest.mark.serial + def test_polling_metric_no_results(self, redis, redis_queue, mocker): + with self.given_inbox_with_one_element(redis, redis_queue): + pbsim_mock = mocker.patch("app.queue.put_batch_saving_inflight_metric") + redis_queue.poll(10) + assert pbsim_mock.assert_called_with(mock.ANY, mock.ANY, 1) is None + pbsim_mock.reset_mock() + redis_queue.poll(10) + assert not pbsim_mock.called, "put_batch_saving_inflight_metric was called and should not have been" + + @pytest.mark.serial + def test_acknowledged_metric(self, redis, redis_queue, mocker): + with self.given_inbox_with_one_element(redis, redis_queue): + pbsip_mock = mocker.patch("app.queue.put_batch_saving_inflight_processed") + (receipt, _) = redis_queue.poll(10) + redis_queue.acknowledge(receipt) + assert pbsip_mock.assert_called_with(mock.ANY, mock.ANY, 1) is None + + def test_put_batch_saving_expiry_metric(self, redis, redis_queue, mocker): + with self.given_inbox_with_many_indexes(redis, redis_queue): + pbsem_mock = mocker.patch("app.queue.put_batch_saving_expiry_metric") + Buffer.INBOX.inbox_name(QNAME_SUFFIX) + redis.set("not_inflight", "test") + redis_queue.poll(10) + redis_queue.poll(10) + redis_queue.poll(10) + time.sleep(2) + redis_queue.expire_inflights() + assert 
pbsem_mock.assert_called_with(mock.ANY, mock.ANY, 3) is None diff --git a/tests/app/test_schemas.py b/tests/app/test_schemas.py index 915cbbef65..2a750d4f67 100644 --- a/tests/app/test_schemas.py +++ b/tests/app/test_schemas.py @@ -13,16 +13,15 @@ def test_job_schema_doesnt_return_notifications(sample_notification_with_job): job = sample_notification_with_job.job assert job.notifications.count() == 1 - data, errors = job_schema.dump(job) + data = job_schema.dump(job) - assert not errors assert "notifications" not in data def test_notification_schema_ignores_absent_api_key(sample_notification_with_job): from app.schemas import notification_with_template_schema - data = notification_with_template_schema.dump(sample_notification_with_job).data + data = notification_with_template_schema.dump(sample_notification_with_job) assert data["key_name"] is None @@ -32,7 +31,7 @@ def test_notification_schema_adds_api_key_name(sample_notification): api_key = create_api_key(sample_notification.service, key_name="Test key") sample_notification.api_key = api_key - data = notification_with_template_schema.dump(sample_notification).data + data = notification_with_template_schema.dump(sample_notification) assert data["key_name"] == "Test key" @@ -48,7 +47,7 @@ def test_notification_schema_adds_api_key_name(sample_notification): def test_notification_schema_has_correct_status(sample_notification, schema_name): from app import schemas - data = getattr(schemas, schema_name).dump(sample_notification).data + data = getattr(schemas, schema_name).dump(sample_notification) assert data["status"] == sample_notification.status @@ -66,7 +65,7 @@ def test_user_update_schema_accepts_valid_attribute_pairs(user_attribute, user_v update_dict = {user_attribute: user_value} from app.schemas import user_update_schema_load_json - data, errors = user_update_schema_load_json.load(update_dict) + errors = user_update_schema_load_json.validate(update_dict) assert not errors @@ -85,7 +84,7 @@ def test_user_update_schema_rejects_invalid_attribute_pairs(user_attribute, user update_dict = {user_attribute: user_value} with pytest.raises(ValidationError): - data, errors = user_update_schema_load_json.load(update_dict) + user_update_schema_load_json.load(update_dict) @pytest.mark.parametrize( @@ -109,7 +108,7 @@ def test_user_update_schema_rejects_disallowed_attribute_keys(user_attribute): from app.schemas import user_update_schema_load_json with pytest.raises(ValidationError) as excinfo: - data, errors = user_update_schema_load_json.load(update_dict) + user_update_schema_load_json.load(update_dict) assert excinfo.value.messages["_schema"][0] == "Unknown field name {}".format(user_attribute) @@ -119,7 +118,7 @@ def test_provider_details_schema_returns_user_details(mocker, sample_user, curre mocker.patch("app.provider_details.switch_providers.get_user_by_id", return_value=sample_user) current_sms_provider.created_by = sample_user - data = provider_details_schema.dump(current_sms_provider).data + data = provider_details_schema.dump(current_sms_provider) assert sorted(data["created_by"].keys()) == sorted(["id", "email_address", "name"]) @@ -131,7 +130,7 @@ def test_provider_details_history_schema_returns_user_details( mocker.patch("app.provider_details.switch_providers.get_user_by_id", return_value=sample_user) current_sms_provider.created_by_id = sample_user.id - data = provider_details_schema.dump(current_sms_provider).data + data = provider_details_schema.dump(current_sms_provider) dao_update_provider_details(current_sms_provider) @@ -140,6 
+139,6 @@ def test_provider_details_history_schema_returns_user_details( .order_by(desc(ProviderDetailsHistory.version)) .first() ) - data = provider_details_schema.dump(current_sms_provider_in_history).data + data = provider_details_schema.dump(current_sms_provider_in_history) assert sorted(data["created_by"].keys()) == sorted(["id", "email_address", "name"]) diff --git a/tests/app/test_sms_fragment_utils.py b/tests/app/test_sms_fragment_utils.py new file mode 100644 index 0000000000..ff57ada1cc --- /dev/null +++ b/tests/app/test_sms_fragment_utils.py @@ -0,0 +1,49 @@ +import pytest +from notifications_utils.clients.redis import sms_daily_count_cache_key + +from app.sms_fragment_utils import ( + fetch_todays_requested_sms_count, + increment_todays_requested_sms_count, +) +from tests.conftest import set_config + + +@pytest.mark.parametrize("redis_value,db_value,expected_result", [(None, 5, 5), ("3", 5, 3)]) +def test_fetch_todays_requested_sms_count(client, mocker, sample_service, redis_value, db_value, expected_result): + cache_key = sms_daily_count_cache_key(sample_service.id) + mocker.patch("app.redis_store.get", lambda x: redis_value if x == cache_key else None) + mocked_set = mocker.patch("app.redis_store.set") + mocker.patch("app.sms_fragment_utils.fetch_todays_total_sms_count", return_value=db_value) + mocker.patch("app.dao.users_dao.user_can_be_archived", return_value=False) + + with set_config(client.application, "REDIS_ENABLED", True): + actual_result = fetch_todays_requested_sms_count(sample_service.id) + + assert actual_result == expected_result + if redis_value is None: + assert mocked_set.called_once_with( + cache_key, + db_value, + ) + else: + mocked_set.assert_not_called() + + +@pytest.mark.parametrize("redis_value,db_value,increment_by", [(None, 5, 5), ("3", 5, 3)]) +def test_increment_todays_requested_sms_count(mocker, sample_service, redis_value, db_value, increment_by): + cache_key = sms_daily_count_cache_key(sample_service.id) + mocker.patch("app.redis_store.get", lambda x: redis_value if x == cache_key else None) + mocked_set = mocker.patch("app.redis_store.set") + mocked_incrby = mocker.patch("app.redis_store.incrby") + mocker.patch("app.sms_fragment_utils.fetch_todays_requested_sms_count", return_value=db_value) + + increment_todays_requested_sms_count(sample_service.id, increment_by) + + assert mocked_incrby.called_once_with(cache_key, increment_by) + if redis_value is None: + assert mocked_set.called_once_with( + cache_key, + db_value, + ) + else: + mocked_set.assert_not_called() diff --git a/tests/app/test_utils.py b/tests/app/test_utils.py index 30c1b89754..2d504c2050 100644 --- a/tests/app/test_utils.py +++ b/tests/app/test_utils.py @@ -4,8 +4,12 @@ from flask import Flask from freezegun import freeze_time +from app.config import QueueNames +from app.models import EMAIL_TYPE, SMS_TYPE from app.utils import ( + get_delivery_queue_for_template, get_document_url, + get_limit_reset_time_et, get_local_timezone_midnight, get_local_timezone_midnight_in_utc, get_logo_url, @@ -13,6 +17,7 @@ midnight_n_days_ago, update_dct_to_str, ) +from tests.app.db import create_template # Naive date times are ambiguous and are treated different on Mac OS vs flavours of *nix @@ -127,3 +132,27 @@ def test_get_document_url(notify_api: Flask): with notify_api.app_context(): assert get_document_url("en", "test.html") == "https://documentation.notification.canada.ca/en/test.html" assert get_document_url("None", "None") == "https://documentation.notification.canada.ca/None/None" + + +def 
test_get_limit_reset_time_et(): + # the daily limit resets at 8PM or 7PM depending on whether it's daylight savings time or not + with freeze_time("2023-08-10 00:00"): + assert get_limit_reset_time_et() == {"12hr": "8PM", "24hr": "20"} + with freeze_time("2023-01-10 00:00"): + assert get_limit_reset_time_et() == {"12hr": "7PM", "24hr": "19"} + + +@pytest.mark.parametrize( + "template_type, process_type, expected_queue", + [ + (SMS_TYPE, "normal", QueueNames.SEND_SMS_MEDIUM), + (SMS_TYPE, "priority", QueueNames.SEND_SMS_HIGH), + (SMS_TYPE, "bulk", QueueNames.SEND_SMS_LOW), + (EMAIL_TYPE, "normal", QueueNames.SEND_EMAIL_MEDIUM), + (EMAIL_TYPE, "priority", QueueNames.SEND_EMAIL_HIGH), + (EMAIL_TYPE, "bulk", QueueNames.SEND_EMAIL_LOW), + ], +) +def test_get_delivery_queue_for_template(sample_service, template_type, process_type, expected_queue): + template = create_template(sample_service, process_type=process_type, template_type=template_type) + assert get_delivery_queue_for_template(template) == expected_queue diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index 9befbb8b79..45968e172a 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -1,13 +1,14 @@ import base64 import json +from unittest import mock from uuid import UUID -import mock import pytest from fido2 import cbor from flask import url_for from freezegun import freeze_time +from app.clients.salesforce.salesforce_engagement import ENGAGEMENT_STAGE_ACTIVATION from app.dao.fido2_key_dao import create_fido2_session, save_fido2_key from app.dao.login_event_dao import save_login_event from app.dao.permissions_dao import default_service_permissions @@ -117,6 +118,7 @@ def test_post_user(client, notify_db, notify_db_session): resp = client.post(url_for("user.create_user"), data=json.dumps(data), headers=headers) assert resp.status_code == 201 user = User.query.filter_by(email_address="user@digital.cabinet-office.gov.uk").first() + assert user.check_password("tQETOgIO8yzDMyCsDjLZIEVZHAvkFArYfmSI1KTsJnlnPohI2tfIa8kfng7bxCm") json_resp = json.loads(resp.get_data(as_text=True)) assert json_resp["data"]["email_address"] == user.email_address assert json_resp["data"]["id"] == str(user.id) @@ -271,6 +273,7 @@ def test_post_user_attribute(client, mocker, sample_user, user_attribute, user_v mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client") resp = client.post( url_for("user.update_user_attribute", user_id=sample_user.id), @@ -282,6 +285,8 @@ def test_post_user_attribute(client, mocker, sample_user, user_attribute, user_v json_resp = json.loads(resp.get_data(as_text=True)) assert json_resp["data"][user_attribute] == user_value + mocked_salesforce_client.contact_update.assert_called_once_with(sample_user) + @pytest.mark.parametrize( "user_attribute, user_value", @@ -301,6 +306,7 @@ def test_post_user_attribute_send_notification_email( mock_persist_notification = mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + mocker.patch("app.user.rest.salesforce_client") resp = client.post( url_for("user.update_user_attribute", user_id=sample_user.id), @@ -376,6 +382,7 @@ def test_post_user_attribute_with_updated_by( headers = [("Content-Type", "application/json"), auth_header] mock_persist_notification = mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + 
mocker.patch("app.user.rest.salesforce_client") resp = client.post( url_for("user.update_user_attribute", user_id=sample_user.id), data=json.dumps(update_dict), @@ -446,6 +453,7 @@ def test_get_user_by_email(client, sample_service): assert sample_user.mobile_number == fetched["mobile_number"] assert sample_user.email_address == fetched["email_address"] assert sample_user.state == fetched["state"] + assert sample_user.password_expired == fetched["password_expired"] assert sorted(expected_permissions) == sorted(fetched["permissions"][str(sample_service.id)]) @@ -702,6 +710,26 @@ def test_send_user_reset_password_should_send_reset_password_link(client, sample assert notification.reply_to_text == notify_service.get_default_reply_to_email_address() +@freeze_time("2016-01-01 11:09:00.061258") +def test_send_user_forced_reset_password_should_send_reset_password_link( + client, sample_user, mocker, forced_password_reset_email_template +): + mocked = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") + data = json.dumps({"email": sample_user.email_address}) + auth_header = create_authorization_header() + notify_service = forced_password_reset_email_template.service + resp = client.post( + url_for("user.send_forced_user_reset_password"), + data=data, + headers=[("Content-Type", "application/json"), auth_header], + ) + + assert resp.status_code == 204 + notification = Notification.query.first() + mocked.assert_called_once_with([str(notification.id)], queue="notify-internal-tasks") + assert notification.reply_to_text == notify_service.get_default_reply_to_email_address() + + @freeze_time("2016-01-01 11:09:00.061258") def test_send_user_reset_password_should_send_400_if_user_blocked(client, mocker, password_reset_email_template): blocked_user = create_user(blocked=True, email="blocked@cds-snc.ca") @@ -807,8 +835,7 @@ def test_send_contact_request_no_live_service(client, sample_user, mocker): } mocked_freshdesk = mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201) - mocked_zendesk = mocker.patch("app.user.rest.Zendesk.send_ticket", return_value=201) - mocked_zendesk_sell = mocker.patch("app.user.rest.ZenDeskSell.send_contact_request", return_value=200) + mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client") resp = client.post( url_for("user.send_contact_request", user_id=str(sample_user.id)), @@ -818,11 +845,10 @@ def test_send_contact_request_no_live_service(client, sample_user, mocker): assert resp.status_code == 204 mocked_freshdesk.assert_called_once_with() - mocked_zendesk.assert_called_once_with() + mocked_salesforce_client.engagement_update.assert_not_called() contact = ContactRequest(**data) contact.tags = ["z_skip_opsgenie", "z_skip_urgent_escalation"] - mocked_zendesk_sell.assert_called_once_with(contact) def test_send_contact_request_with_live_service(client, sample_service, mocker): @@ -833,8 +859,7 @@ def test_send_contact_request_with_live_service(client, sample_service, mocker): "support_type": "ask_question", } mocked_freshdesk = mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201) - mocked_zendesk = mocker.patch("app.user.rest.Zendesk.send_ticket", return_value=201) - mocked_zendesk_sell = mocker.patch("app.user.rest.ZenDeskSell.send_contact_request", return_value=200) + mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client") resp = client.post( url_for("user.send_contact_request", user_id=str(sample_user.id)), @@ -843,8 +868,7 @@ def test_send_contact_request_with_live_service(client, 
sample_service, mocker): ) assert resp.status_code == 204 mocked_freshdesk.assert_called_once_with() - mocked_zendesk.assert_called_once_with() - mocked_zendesk_sell.assert_called_once_with(ContactRequest(**data)) + mocked_salesforce_client.engagement_update.assert_not_called() def test_send_contact_request_demo(client, sample_user, mocker): @@ -854,7 +878,7 @@ def test_send_contact_request_demo(client, sample_user, mocker): "support_type": "demo", } mocked_freshdesk = mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201) - mocked_zendesk = mocker.patch("app.user.rest.ZenDeskSell.send_contact_request", return_value="1") + mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client") resp = client.post( url_for("user.send_contact_request", user_id=str(sample_user.id)), @@ -863,11 +887,10 @@ def test_send_contact_request_demo(client, sample_user, mocker): ) assert resp.status_code == 204 - mocked_freshdesk.assert_not_called() - + mocked_freshdesk.assert_called_once_with() + mocked_salesforce_client.engagement_update.assert_not_called() contact = ContactRequest(**data) contact.tags = ["z_skip_opsgenie", "z_skip_urgent_escalation"] - mocked_zendesk.assert_called_once_with(contact) def test_send_contact_request_go_live(client, sample_service, mocker): @@ -875,12 +898,13 @@ def test_send_contact_request_go_live(client, sample_service, mocker): data = { "name": sample_user.name, "email_address": sample_user.email_address, + "main_use_case": "I want to send emails", "support_type": "go_live_request", "service_id": str(sample_service.id), } + mocked_dao_fetch_service_by_id = mocker.patch("app.user.rest.dao_fetch_service_by_id", return_value=sample_service) mocked_freshdesk = mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201) - mocked_zendesk = mocker.patch("app.user.rest.Zendesk.send_ticket", return_value=201) - mocked_zendesk_sell = mocker.patch("app.user.rest.ZenDeskSell.send_go_live_request", return_value="1") + mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client") resp = client.post( url_for("user.send_contact_request", user_id=str(sample_user.id)), @@ -889,20 +913,62 @@ def test_send_contact_request_go_live(client, sample_service, mocker): ) assert resp.status_code == 204 mocked_freshdesk.assert_called_once_with() - mocked_zendesk.assert_called_once_with() - mocked_zendesk_sell.assert_called_once_with(sample_service, sample_user, ContactRequest(**data)) + mocked_dao_fetch_service_by_id.assert_called_once_with(str(sample_service.id)) + mocked_salesforce_client.engagement_update.assert_called_once_with( + sample_service, sample_user, {"StageName": ENGAGEMENT_STAGE_ACTIVATION, "Description": "I want to send emails"} + ) -def test_send_branding_request(client, sample_service, mocker): +@pytest.mark.parametrize( + "organisation_notes, department_org_name", + [ + ("TBS > CDS", "TBS > CDS"), + (None, "Unknown"), + ], +) +def test_send_contact_request_go_live_with_org_notes(organisation_notes, department_org_name, client, sample_service, mocker): sample_user = sample_service.users[0] + sample_service.organisation_notes = organisation_notes + data = { + "name": sample_user.name, + "email_address": sample_user.email_address, + "main_use_case": "I want to send emails", + "support_type": "go_live_request", + "service_id": str(sample_service.id), + } + mock_contact_request = mocker.MagicMock() + mocker.patch("app.user.rest.ContactRequest", return_value=mock_contact_request) + mocker.patch("app.user.rest.dao_fetch_service_by_id", 
return_value=sample_service) + mocker.patch("app.user.rest.dao_update_service") + mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201) + mocker.patch("app.user.rest.get_user_by_email", return_value=sample_user) + mocker.patch("app.user.rest.salesforce_client") + mock_contact_request.department_org_name = None + + resp = client.post( + url_for("user.send_contact_request", user_id=str(sample_user.id)), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), create_authorization_header()], + ) + assert resp.status_code == 204 + assert mock_contact_request.department_org_name == department_org_name + + +def test_send_branding_request(client, sample_service, sample_organisation, mocker): + sample_user = sample_service.users[0] + sample_service.organisation = sample_organisation post_data = { "service_name": sample_service.name, "email_address": sample_user.email_address, "serviceID": str(sample_service.id), + "organisation_id": str(sample_service.organisation.id), + "organisation_name": sample_service.organisation.name, "filename": "branding_url", + "alt_text_en": "hello world", + "alt_text_fr": "bonjour", } mocked_freshdesk = mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201) - mocked_zendesk = mocker.patch("app.user.rest.Zendesk.send_ticket", return_value=201) + mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client") resp = client.post( url_for("user.send_branding_request", user_id=str(sample_user.id)), @@ -911,7 +977,32 @@ def test_send_branding_request(client, sample_service, mocker): ) assert resp.status_code == 204 mocked_freshdesk.assert_called_once_with() - mocked_zendesk.assert_called_once_with() + mocked_salesforce_client.engagement_update.assert_not_called() + + +class TestFreshDeskRequestTickets: + def test_send_request_for_new_category(self, client, sample_service, sample_organisation, mocker): + sample_user = sample_service.users[0] + sample_service.organisation = sample_organisation + post_data = { + "service_name": sample_service.name, + "email_address": sample_user.email_address, + "service_id": str(sample_service.id), + "template_category_name_en": "test", + "template_category_name_fr": "test", + "template_id": "1234", + } + mocked_freshdesk = mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201) + mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client") + + resp = client.post( + url_for("user.send_new_template_category_request", user_id=str(sample_user.id)), + data=json.dumps(post_data), + headers=[("Content-Type", "application/json"), create_authorization_header()], + ) + assert resp.status_code == 204 + mocked_freshdesk.assert_called_once_with() + mocked_salesforce_client.engagement_update.assert_not_called() def test_send_user_confirm_new_email_returns_204(client, sample_user, change_email_confirmation_template, mocker): @@ -986,15 +1077,53 @@ def test_update_user_password_failes_when_banned_password_used(client, sample_se assert resp.status_code == 400 -def test_activate_user(admin_request, sample_user): +def test_update_user_password_creates_LoginEvent_when_loginData_provided(client, sample_service, mocker): + sample_user = sample_service.users[0] + new_password = "Sup3rS3cur3_P4ssw0rd" + data = {"_password": new_password, "loginData": {"some": "data"}} + auth_header = create_authorization_header() + headers = [("Content-Type", "application/json"), auth_header] + + resp = client.post( + url_for("user.update_password", user_id=sample_user.id), + 
data=json.dumps(data), + headers=headers, + ) + assert resp.status_code == 200 + + assert LoginEvent.query.count() == 1 + + +def test_update_user_password_does_not_create_LoginEvent_when_loginData_not_provided(client, sample_service, mocker): + sample_user = sample_service.users[0] + new_password = "Sup3rS3cur3_P4ssw0rd" + data = {"_password": new_password} + auth_header = create_authorization_header() + headers = [("Content-Type", "application/json"), auth_header] + + resp = client.post( + url_for("user.update_password", user_id=sample_user.id), + data=json.dumps(data), + headers=headers, + ) + assert resp.status_code == 200 + + assert LoginEvent.query.count() == 0 + + +def test_activate_user(admin_request, sample_user, mocker): sample_user.state = "pending" + mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client") + resp = admin_request.post("user.activate_user", user_id=sample_user.id) assert resp["data"]["id"] == str(sample_user.id) assert resp["data"]["state"] == "active" assert sample_user.state == "active" + mocked_salesforce_client.contact_create.assert_called_once_with(sample_user) + def test_activate_user_fails_if_already_active(admin_request, sample_user): resp = admin_request.post("user.activate_user", user_id=sample_user.id, _expected_status=400) @@ -1005,6 +1134,7 @@ def test_activate_user_fails_if_already_active(admin_request, sample_user): def test_update_user_auth_type(admin_request, sample_user, account_change_template, mocker): mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + mocker.patch("app.user.rest.salesforce_client") assert sample_user.auth_type == "email_auth" resp = admin_request.post( @@ -1020,6 +1150,7 @@ def test_update_user_auth_type(admin_request, sample_user, account_change_templa def test_can_set_email_auth_and_remove_mobile_at_same_time(admin_request, sample_user, account_change_template, mocker): mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + mocker.patch("app.user.rest.salesforce_client") sample_user.auth_type = SMS_AUTH_TYPE admin_request.post( @@ -1038,6 +1169,7 @@ def test_can_set_email_auth_and_remove_mobile_at_same_time(admin_request, sample def test_cannot_remove_mobile_if_sms_auth(admin_request, sample_user, account_change_template, mocker): mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + mocker.patch("app.user.rest.salesforce_client") sample_user.auth_type = SMS_AUTH_TYPE json_resp = admin_request.post( @@ -1053,6 +1185,7 @@ def test_cannot_remove_mobile_if_sms_auth(admin_request, sample_user, account_ch def test_can_remove_mobile_if_email_auth(admin_request, sample_user, account_change_template, mocker): mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + mocker.patch("app.user.rest.salesforce_client") sample_user.auth_type = EMAIL_AUTH_TYPE admin_request.post( @@ -1067,6 +1200,7 @@ def test_can_remove_mobile_if_email_auth(admin_request, sample_user, account_cha def test_cannot_update_user_with_mobile_number_as_empty_string(admin_request, sample_user, account_change_template, mocker): mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + mocker.patch("app.user.rest.salesforce_client") sample_user.auth_type = EMAIL_AUTH_TYPE resp = admin_request.post( @@ -1081,13 +1215,14 @@ def 
test_cannot_update_user_with_mobile_number_as_empty_string(admin_request, sa def test_cannot_update_user_password_using_attributes_method(admin_request, sample_user, account_change_template, mocker): mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + mocker.patch("app.user.rest.salesforce_client") resp = admin_request.post( "user.update_user_attribute", user_id=sample_user.id, _data={"password": "foo"}, _expected_status=400, ) - assert resp["message"]["_schema"] == ["Unknown field name password"] + assert resp == {"message": {"_schema": ["Unknown field name password"]}, "result": "error"} def test_get_orgs_and_services_nests_services(admin_request, sample_user): @@ -1430,6 +1565,7 @@ def test_list_login_events_for_a_user(client, sample_service): def test_update_user_blocked(admin_request, sample_user, account_change_template, mocker): mocker.patch("app.user.rest.persist_notification") mocker.patch("app.user.rest.send_notification_to_queue") + mocker.patch("app.user.rest.salesforce_client") resp = admin_request.post( "user.update_user_attribute", user_id=sample_user.id, @@ -1438,3 +1574,34 @@ def test_update_user_blocked(admin_request, sample_user, account_change_template assert resp["data"]["id"] == str(sample_user.id) assert resp["data"]["blocked"] + + +class TestFailedLogin: + def test_update_user_password_saves_correctly(self, client, sample_service, mocker): + sample_user = sample_service.users[0] + new_password = "tQETOgIO8yzDMyCsDjLZIEVZHAvkFArYfmSI1KTsJnlnPohI2tfIa8kfng7bxCm" + data = {"_password": new_password} + auth_header = create_authorization_header() + headers = [("Content-Type", "application/json"), auth_header] + resp = client.post( + url_for("user.update_password", user_id=sample_user.id), + data=json.dumps(data), + headers=headers, + ) + assert resp.status_code == 200 + + json_resp = json.loads(resp.get_data(as_text=True)) + assert json_resp["data"]["password_changed_at"] is not None + data = {"password": new_password} + auth_header = create_authorization_header() + headers = [("Content-Type", "application/json"), auth_header] + # We force a the password to fail on login + mocker.patch("app.models.User.check_password", return_value=False) + + resp = client.post( + url_for("user.verify_user_password", user_id=str(sample_user.id)), + data=json.dumps(data), + headers=headers, + ) + assert resp.status_code == 400 + assert "Incorrect password for user_id" in resp.json["message"]["password"][0] diff --git a/tests/app/user/test_rest_verify.py b/tests/app/user/test_rest_verify.py index 9220bb673a..8743391aeb 100644 --- a/tests/app/user/test_rest_verify.py +++ b/tests/app/user/test_rest_verify.py @@ -13,6 +13,7 @@ from app.dao.users_dao import create_user_code from app.models import EMAIL_TYPE, SMS_TYPE, Notification, User, VerifyCode from tests import create_authorization_header +from tests.conftest import set_config_values @freeze_time("2016-01-01T12:00:00") @@ -111,21 +112,22 @@ def test_user_verify_password_creates_login_event(client, sample_user): assert len(events) == 1 -def test_user_verify_password_invalid_password(client, sample_user): +def test_user_verify_password_invalid_password(client, sample_user, mocker): data = json.dumps({"password": "bad password"}) auth_header = create_authorization_header() assert sample_user.failed_login_count == 0 - resp = client.post( - url_for("user.verify_user_password", user_id=sample_user.id), - data=data, - headers=[("Content-Type", "application/json"), auth_header], - ) - 
assert resp.status_code == 400 - json_resp = json.loads(resp.get_data(as_text=True)) - assert "Incorrect password" in json_resp["message"]["password"] - assert sample_user.failed_login_count == 1 + with set_config_values(current_app, {"FAILED_LOGIN_LIMIT": 10}): + resp = client.post( + url_for("user.verify_user_password", user_id=sample_user.id), + data=data, + headers=[("Content-Type", "application/json"), auth_header], + ) + assert resp.status_code == 400 + json_resp = json.loads(resp.get_data(as_text=True)) + assert "Incorrect password" in json_resp["message"]["password"][0] + assert sample_user.failed_login_count == 1 def test_user_verify_password_valid_password_resets_failed_logins(client, sample_user): @@ -134,27 +136,28 @@ def test_user_verify_password_valid_password_resets_failed_logins(client, sample assert sample_user.failed_login_count == 0 - resp = client.post( - url_for("user.verify_user_password", user_id=sample_user.id), - data=data, - headers=[("Content-Type", "application/json"), auth_header], - ) - assert resp.status_code == 400 - json_resp = json.loads(resp.get_data(as_text=True)) - assert "Incorrect password" in json_resp["message"]["password"] - - assert sample_user.failed_login_count == 1 - - data = json.dumps({"password": "password"}) - auth_header = create_authorization_header() - resp = client.post( - url_for("user.verify_user_password", user_id=sample_user.id), - data=data, - headers=[("Content-Type", "application/json"), auth_header], - ) + with set_config_values(current_app, {"FAILED_LOGIN_LIMIT": 10}): + resp = client.post( + url_for("user.verify_user_password", user_id=sample_user.id), + data=data, + headers=[("Content-Type", "application/json"), auth_header], + ) + assert resp.status_code == 400 + json_resp = json.loads(resp.get_data(as_text=True)) + assert "Incorrect password" in json_resp["message"]["password"][0] + + assert sample_user.failed_login_count == 1 + + data = json.dumps({"password": "password"}) + auth_header = create_authorization_header() + resp = client.post( + url_for("user.verify_user_password", user_id=sample_user.id), + data=data, + headers=[("Content-Type", "application/json"), auth_header], + ) - assert resp.status_code == 204 - assert sample_user.failed_login_count == 0 + assert resp.status_code == 204 + assert sample_user.failed_login_count == 0 def test_user_verify_password_missing_password(client, sample_user): diff --git a/tests/app/v2/notifications/test_get_notifications.py b/tests/app/v2/notifications/test_get_notifications.py index 2f9023bc98..cfa4e7bc91 100644 --- a/tests/app/v2/notifications/test_get_notifications.py +++ b/tests/app/v2/notifications/test_get_notifications.py @@ -62,6 +62,7 @@ def test_get_notification_by_id_returns_200(client, billable_units, provider, sa "postcode": None, "type": "{}".format(sample_notification.notification_type), "status": "{}".format(sample_notification.status), + "status_description": "{}".format(sample_notification.formatted_status), "provider_response": sample_notification.provider_response, "template": expected_template_response, "created_at": sample_notification.created_at.strftime(DATETIME_FORMAT), @@ -116,6 +117,7 @@ def test_get_notification_by_id_with_placeholders_returns_200(client, sample_ema "postcode": None, "type": "{}".format(sample_notification.notification_type), "status": "{}".format(sample_notification.status), + "status_description": "{}".format(sample_notification.formatted_status), "provider_response": sample_notification.provider_response, "template": 
expected_template_response, "created_at": sample_notification.created_at.strftime(DATETIME_FORMAT), @@ -221,10 +223,7 @@ def test_get_notification_by_id_nonexistent_id(client, sample_notification): assert response.headers["Content-type"] == "application/json" json_response = json.loads(response.get_data(as_text=True)) - assert json_response == { - "errors": [{"error": "NoResultFound", "message": "No result found"}], - "status_code": 404, - } + assert json_response == {"message": "Notification not found in database", "result": "error"} @pytest.mark.parametrize("id", ["1234-badly-formatted-id-7890", "0"]) @@ -321,6 +320,7 @@ def test_get_all_notifications_except_job_notifications_returns_200(client, samp assert json_response["notifications"][0]["id"] == str(notification.id) assert json_response["notifications"][0]["status"] == "created" + assert json_response["notifications"][0]["status_description"] == "In transit" assert json_response["notifications"][0]["template"] == { "id": str(notification.template.id), "uri": notification.template.get_link(), @@ -353,6 +353,7 @@ def test_get_all_notifications_with_include_jobs_arg_returns_200(client, sample_ assert json_response["notifications"][0]["id"] == str(notification.id) assert json_response["notifications"][0]["status"] == notification.status + assert json_response["notifications"][0]["status_description"] == notification.formatted_status assert json_response["notifications"][0]["phone_number"] == notification.to assert json_response["notifications"][0]["type"] == notification.template.template_type assert not json_response["notifications"][0]["scheduled_for"] @@ -397,6 +398,7 @@ def test_get_all_notifications_filter_by_template_type(client, sample_service): assert json_response["notifications"][0]["id"] == str(notification.id) assert json_response["notifications"][0]["status"] == "created" + assert json_response["notifications"][0]["status_description"] == "In transit" assert json_response["notifications"][0]["template"] == { "id": str(email_template.id), "uri": notification.template.get_link(), @@ -443,6 +445,7 @@ def test_get_all_notifications_filter_by_single_status(client, sample_template): assert json_response["notifications"][0]["id"] == str(notification.id) assert json_response["notifications"][0]["status"] == "pending" + assert json_response["notifications"][0]["status_description"] == "In transit" def test_get_all_notifications_filter_by_status_invalid_status(client, sample_notification): diff --git a/tests/app/v2/notifications/test_post_letter_notifications.py b/tests/app/v2/notifications/test_post_letter_notifications.py index cdc625a531..1a01439f82 100644 --- a/tests/app/v2/notifications/test_post_letter_notifications.py +++ b/tests/app/v2/notifications/test_post_letter_notifications.py @@ -58,6 +58,7 @@ def letter_request( return json_resp +@pytest.mark.skip(reason="Deprecated: LETTER CODE") @pytest.mark.parametrize("reference", [None, "reference_from_client"]) def test_post_letter_notification_returns_201(client, sample_letter_template, mocker, reference): mock = mocker.patch("app.celery.tasks.letters_pdf_tasks.create_letters_pdf.apply_async") @@ -97,6 +98,7 @@ def test_post_letter_notification_returns_201(client, sample_letter_template, mo mock.assert_called_once_with([str(notification.id)], queue=QueueNames.CREATE_LETTERS_PDF) +@pytest.mark.skip(reason="Deprecated: LETTER CODE") def test_post_letter_notification_sets_postage(client, notify_db_session, mocker): service = create_service(service_permissions=[LETTER_TYPE]) 
template = create_template(service, template_type="letter", postage="first") @@ -129,7 +131,6 @@ def test_post_letter_notification_sets_postage(client, notify_db_session, mocker def test_post_letter_notification_with_test_key_creates_pdf_and_sets_status_to_delivered( notify_api, client, sample_letter_template, mocker, env ): - data = { "template_id": str(sample_letter_template.id), "personalisation": { @@ -170,7 +171,6 @@ def test_post_letter_notification_with_test_key_creates_pdf_and_sets_status_to_d def test_post_letter_notification_with_test_key_creates_pdf_and_sets_status_to_sending_and_sends_fake_response_file( notify_api, client, sample_letter_template, mocker, env ): - data = { "template_id": str(sample_letter_template.id), "personalisation": { @@ -278,7 +278,7 @@ def test_notification_returns_400_if_address_doesnt_have_underscores(client, sam def test_returns_a_429_limit_exceeded_if_rate_limit_exceeded(client, sample_letter_template, mocker): - persist_mock = mocker.patch("app.v2.notifications.post_notifications.persist_notification") + persist_mock = mocker.patch("app.notifications.process_notifications.persist_notification") mocker.patch( "app.v2.notifications.post_notifications.check_rate_limiting", side_effect=RateLimitError("LIMIT", "INTERVAL", "TYPE"), diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py index d325365333..cc33c4d527 100644 --- a/tests/app/v2/notifications/test_post_notifications.py +++ b/tests/app/v2/notifications/test_post_notifications.py @@ -8,18 +8,25 @@ import pytest from flask import current_app, json from freezegun import freeze_time +from notifications_python_client.authentication import create_jwt_token +from app import signer_notification +from app.celery.tasks import seed_bounce_rate_in_redis +from app.dao.api_key_dao import get_unsigned_secret, save_model_api_key from app.dao.jobs_dao import dao_get_job_by_id -from app.dao.service_sms_sender_dao import dao_update_service_sms_sender from app.models import ( EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, - NOTIFICATION_CREATED, + KEY_TYPE_NORMAL, + KEY_TYPE_TEAM, + KEY_TYPE_TEST, SCHEDULE_NOTIFICATIONS, SMS_TYPE, UPLOAD_DOCUMENT, + ApiKey, Notification, ScheduledNotification, + ServiceSmsSender, ) from app.schema_validation import validate from app.utils import get_document_url @@ -28,8 +35,14 @@ post_email_response, post_sms_response, ) +from app.v2.notifications.post_notifications import _seed_bounce_data from tests import create_authorization_header -from tests.app.conftest import document_download_response, sample_template +from tests.app.conftest import ( + create_sample_notification, + create_sample_template, + document_download_response, + random_sized_content, +) from tests.app.db import ( create_api_key, create_reply_to_email, @@ -37,8 +50,9 @@ create_service_sms_sender, create_service_with_inbound_number, create_template, + create_user, ) -from tests.conftest import set_config +from tests.conftest import set_config, set_config_values def rows_to_csv(rows): @@ -48,440 +62,711 @@ def rows_to_csv(rows): return output.getvalue() -@pytest.mark.parametrize("reference", [None, "reference_from_client"]) -def test_post_sms_notification_returns_201(notify_api, client, sample_template_with_placeholders, mocker, reference): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - data = { - "phone_number": "+16502532222", - "template_id": 
str(sample_template_with_placeholders.id), - "personalisation": {" Name": "Jo"}, - } - if reference: - data.update({"reference": reference}) - auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) +class TestSingleEndpointSucceeds: + @pytest.mark.parametrize("reference", [None, "reference_from_client"]) + def test_post_sms_notification_returns_201(self, notify_api, client, sample_template_with_placeholders, mocker, reference): + mock_publish = mocker.patch("app.sms_normal_publish.publish") + data = { + "phone_number": "+16502532222", + "template_id": str(sample_template_with_placeholders.id), + "personalisation": {" Name": "Jo"}, + } + if reference: + data.update({"reference": reference}) + auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_sms_response) == resp_json - notifications = Notification.query.all() - assert len(notifications) == 1 - assert notifications[0].status == NOTIFICATION_CREATED - notification_id = notifications[0].id - assert notifications[0].postage is None - assert resp_json["id"] == str(notification_id) - assert resp_json["reference"] == reference - assert resp_json["content"]["body"] == sample_template_with_placeholders.content.replace("(( Name))", "Jo") - assert resp_json["content"]["from_number"] == current_app.config["FROM_NUMBER"] - assert "v2/notifications/{}".format(notification_id) in resp_json["uri"] - assert resp_json["template"]["id"] == str(sample_template_with_placeholders.id) - assert resp_json["template"]["version"] == sample_template_with_placeholders.version - assert ( - "services/{}/templates/{}".format( - sample_template_with_placeholders.service_id, - sample_template_with_placeholders.id, - ) - in resp_json["template"]["uri"] - ) - assert not resp_json["scheduled_for"] - assert mocked.called + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + resp_json = json.loads(response.get_data(as_text=True)) + assert validate(resp_json, post_sms_response) == resp_json + + mock_publish_args = mock_publish.call_args.args[0] + mock_publish_args_unsigned = signer_notification.verify(mock_publish_args) + assert mock_publish_args_unsigned["to"] == data["phone_number"] + assert mock_publish_args_unsigned["id"] == resp_json["id"] + + assert resp_json["id"] == str(mock_publish_args_unsigned["id"]) + assert resp_json["reference"] == reference + assert resp_json["content"]["body"] == sample_template_with_placeholders.content.replace("(( Name))", "Jo") + assert resp_json["content"]["from_number"] == current_app.config["FROM_NUMBER"] + assert "v2/notifications/{}".format(mock_publish_args_unsigned["id"]) in resp_json["uri"] + assert resp_json["template"]["id"] == str(sample_template_with_placeholders.id) + assert resp_json["template"]["version"] == sample_template_with_placeholders.version + assert ( + "services/{}/templates/{}".format( + sample_template_with_placeholders.service_id, + sample_template_with_placeholders.id, + ) + in resp_json["template"]["uri"] + ) + assert not resp_json["scheduled_for"] + def test_post_sms_notification_uses_sms_sender_id_reply_to( + self, 
notify_api, client, sample_template_with_placeholders, mocker + ): + sms_sender = create_service_sms_sender(service=sample_template_with_placeholders.service, sms_sender="6502532222") + mock_publish = mocker.patch("app.sms_normal_publish.publish") + data = { + "phone_number": "+16502532222", + "template_id": str(sample_template_with_placeholders.id), + "personalisation": {" Name": "Jo"}, + "sms_sender_id": str(sms_sender.id), + } + auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) -@pytest.mark.parametrize("reference", [None, "reference_from_client"]) -def test_post_sms_notification_with_persistance_in_celery_returns_201( - notify_api, client, sample_template_with_placeholders, mocker, reference -): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = 1 - mocked = mocker.patch("app.celery.tasks.save_sms.apply_async") - data = { - "phone_number": "+16502532222", - "template_id": str(sample_template_with_placeholders.id), - "personalisation": {" Name": "Jo"}, - } - if reference: - data.update({"reference": reference}) - auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + resp_json = json.loads(response.get_data(as_text=True)) + assert validate(resp_json, post_sms_response) == resp_json + assert resp_json["content"]["from_number"] == "+16502532222" + mock_publish_args = mock_publish.call_args.args[0] + mock_publish_args_unsigned = signer_notification.verify(mock_publish_args) + assert mock_publish_args_unsigned["to"] == data["phone_number"] + assert mock_publish_args_unsigned["id"] == resp_json["id"] + + def test_post_sms_notification_uses_inbound_number_as_sender(self, notify_api, client, notify_db_session, mocker): + service = create_service_with_inbound_number(inbound_number="1") + template = create_template(service=service, content="Hello (( Name))\nYour thing is due soon") + mock_publish = mocker.patch("app.sms_normal_publish.publish") + data = { + "phone_number": "+16502532222", + "template_id": str(template.id), + "personalisation": {" Name": "Jo"}, + } + auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_sms_response) == resp_json - assert resp_json["reference"] == reference - assert resp_json["content"]["body"] == sample_template_with_placeholders.content.replace("(( Name))", "Jo") - assert resp_json["content"]["from_number"] == current_app.config["FROM_NUMBER"] - assert "v2/notifications/{}".format(resp_json["id"]) in resp_json["uri"] - assert resp_json["template"]["id"] == str(sample_template_with_placeholders.id) - assert resp_json["template"]["version"] == sample_template_with_placeholders.version - assert ( - "services/{}/templates/{}".format( - sample_template_with_placeholders.service_id, - sample_template_with_placeholders.id, - ) - in resp_json["template"]["uri"] - ) - assert not resp_json["scheduled_for"] - assert mocked.called - - -def test_post_sms_notification_uses_inbound_number_as_sender(notify_api, client, notify_db_session, mocker): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = 
False - service = create_service_with_inbound_number(inbound_number="1") - template = create_template(service=service, content="Hello (( Name))\nYour thing is due soon") - mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - data = { - "phone_number": "+16502532222", - "template_id": str(template.id), - "personalisation": {" Name": "Jo"}, - } - auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + resp_json = json.loads(response.get_data(as_text=True)) + assert validate(resp_json, post_sms_response) == resp_json + mock_publish_args = mock_publish.call_args.args[0] + mock_publish_args_unsigned = signer_notification.verify(mock_publish_args) + assert mock_publish_args_unsigned["to"] == data["phone_number"] + assert mock_publish_args_unsigned["id"] == resp_json["id"] + assert resp_json["content"]["from_number"] == "1" + + def test_post_sms_notification_returns_201_with_sms_sender_id( + self, notify_api, client, sample_template_with_placeholders, mocker + ): + sms_sender = create_service_sms_sender(service=sample_template_with_placeholders.service, sms_sender="123456") + mock_publish = mocker.patch("app.sms_normal_publish.publish") + data = { + "phone_number": "+16502532222", + "template_id": str(sample_template_with_placeholders.id), + "personalisation": {" Name": "Jo"}, + "sms_sender_id": str(sms_sender.id), + } + auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_sms_response) == resp_json - notifications = Notification.query.all() - assert len(notifications) == 1 - notification_id = notifications[0].id - assert resp_json["id"] == str(notification_id) - assert resp_json["content"]["from_number"] == "1" - assert notifications[0].reply_to_text == "1" - mocked.assert_called_once_with([str(notification_id)], queue="send-sms-tasks") - - -def test_post_sms_notification_uses_inbound_number_reply_to_as_sender(notify_api, client, notify_db_session, mocker): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - service = create_service_with_inbound_number(inbound_number="6502532222") - template = create_template(service=service, content="Hello (( Name))\nYour thing is due soon") - mocked = mocker.patch("app.celery.provider_tasks.deliver_throttled_sms.apply_async") - data = { - "phone_number": "+16502532222", - "template_id": str(template.id), - "personalisation": {" Name": "Jo"}, - } - auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + resp_json = json.loads(response.get_data(as_text=True)) + assert validate(resp_json, post_sms_response) == resp_json + assert resp_json["content"]["from_number"] == sms_sender.sms_sender + mock_publish_args = mock_publish.call_args.args[0] + mock_publish_args_unsigned = signer_notification.verify(mock_publish_args) + assert mock_publish_args_unsigned["to"] == data["phone_number"] + assert 
mock_publish_args_unsigned["id"] == resp_json["id"] + + def test_post_sms_notification_returns_201_if_allowed_to_send_int_sms( + self, + notify_api, + sample_service, + sample_template, + client, + mocker, + ): + mocker.patch("app.sms_normal_publish.publish") + + data = {"phone_number": "+20-12-1234-1234", "template_id": sample_template.id} + auth_header = create_authorization_header(service_id=sample_service.id) - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + + assert response.status_code == 201 + assert response.headers["Content-type"] == "application/json" + + def test_post_sms_should_publish_supplied_sms_number(self, notify_api, client, sample_template_with_placeholders, mocker): + mock_publish = mocker.patch("app.sms_normal_publish.publish") + + data = { + "phone_number": "+16502532222", + "template_id": str(sample_template_with_placeholders.id), + "personalisation": {" Name": "Jo"}, + } + + auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + + assert response.status_code == 201 + resp_json = json.loads(response.get_data(as_text=True)) + + mock_publish_args = mock_publish.call_args.args[0] + mock_publish_args_unsigned = signer_notification.verify(mock_publish_args) + assert mock_publish_args_unsigned["to"] == data["phone_number"] + assert mock_publish_args_unsigned["id"] == resp_json["id"] + + @pytest.mark.parametrize("reference", [None, "reference_from_client"]) + def test_post_email_notification_returns_201(notify_api, client, sample_email_template_with_placeholders, mocker, reference): + mock_publish = mocker.patch("app.email_normal_publish.publish") + data = { + "email_address": sample_email_template_with_placeholders.service.users[0].email_address, + "template_id": sample_email_template_with_placeholders.id, + "personalisation": {"name": "Bob"}, + } + if reference: + data.update({"reference": reference}) + auth_header = create_authorization_header(service_id=sample_email_template_with_placeholders.service_id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + resp_json = json.loads(response.get_data(as_text=True)) + assert validate(resp_json, post_email_response) == resp_json + + mock_publish_args = mock_publish.call_args.args[0] + mock_publish_args_unsigned = signer_notification.verify(mock_publish_args) + assert mock_publish_args_unsigned["to"] == data["email_address"] + assert mock_publish_args_unsigned["id"] == resp_json["id"] + + assert resp_json["reference"] == reference + assert resp_json["content"]["body"] == sample_email_template_with_placeholders.content.replace("((name))", "Bob") + assert resp_json["content"]["subject"] == sample_email_template_with_placeholders.subject.replace("((name))", "Bob") + assert resp_json["content"]["from_email"] == "{}@{}".format( + sample_email_template_with_placeholders.service.email_from, + current_app.config["NOTIFY_EMAIL_DOMAIN"], + ) + assert "v2/notifications/{}".format(mock_publish_args_unsigned["id"]) in resp_json["uri"] + assert 
resp_json["template"]["id"] == str(sample_email_template_with_placeholders.id) + assert resp_json["template"]["version"] == sample_email_template_with_placeholders.version + assert ( + "services/{}/templates/{}".format( + str(sample_email_template_with_placeholders.service_id), + str(sample_email_template_with_placeholders.id), + ) + in resp_json["template"]["uri"] + ) + assert not resp_json["scheduled_for"] + + def test_post_email_notification_with_valid_reply_to_id_returns_201(self, notify_api, client, sample_email_template, mocker): + reply_to_email = create_reply_to_email(sample_email_template.service, "test@test.com") + mock_publish = mocker.patch("app.email_normal_publish.publish") + data = { + "email_address": sample_email_template.service.users[0].email_address, + "template_id": sample_email_template.id, + "email_reply_to_id": reply_to_email.id, + } + auth_header = create_authorization_header(service_id=sample_email_template.service_id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + resp_json = json.loads(response.get_data(as_text=True)) + assert validate(resp_json, post_email_response) == resp_json + mock_publish_args = mock_publish.call_args.args[0] + mock_publish_args_unsigned = signer_notification.verify(mock_publish_args) + assert mock_publish_args_unsigned["to"] == data["email_address"] + assert mock_publish_args_unsigned["id"] == resp_json["id"] + + +class TestPostNotificationsErrors: + @pytest.mark.parametrize( + "notification_type, key_send_to, send_to", + [ + ("sms", "phone_number", "+16502532222"), + ("email", "email_address", "sample@email.com"), + ], ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_sms_response) == resp_json - notifications = Notification.query.all() - assert len(notifications) == 1 - notification_id = notifications[0].id - assert resp_json["id"] == str(notification_id) - assert resp_json["content"]["from_number"] == "+16502532222" - assert notifications[0].reply_to_text == "+16502532222" - mocked.assert_called_once_with([str(notification_id)], queue="send-throttled-sms-tasks") - - -def test_post_sms_notification_returns_201_with_sms_sender_id(notify_api, client, sample_template_with_placeholders, mocker): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - sms_sender = create_service_sms_sender(service=sample_template_with_placeholders.service, sms_sender="123456") - mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - data = { - "phone_number": "+16502532222", - "template_id": str(sample_template_with_placeholders.id), - "personalisation": {" Name": "Jo"}, - "sms_sender_id": str(sms_sender.id), - } - auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) + def test_post_notification_returns_400_and_missing_template( + self, client, sample_service, notification_type, key_send_to, send_to + ): + data = {key_send_to: send_to, "template_id": str(uuid.uuid4())} + auth_header = create_authorization_header(service_id=sample_service.id) - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + response = client.post( + path="/v2/notifications/{}".format(notification_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + + 
assert response.status_code == 400 + assert response.headers["Content-type"] == "application/json" + + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["status_code"] == 400 + assert error_json["errors"] == [{"error": "BadRequestError", "message": "Template not found"}] + + @pytest.mark.parametrize( + "notification_type, key_send_to, send_to", + [ + ("sms", "phone_number", "+16502532222"), + ("email", "email_address", "sample@email.com"), + ( + "letter", + "personalisation", + {"address_line_1": "The queen", "postcode": "SW1 1AA"}, + ), + ], ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_sms_response) == resp_json - assert resp_json["content"]["from_number"] == sms_sender.sms_sender - notifications = Notification.query.all() - assert len(notifications) == 1 - assert notifications[0].reply_to_text == sms_sender.sms_sender - mocked.assert_called_once_with([resp_json["id"]], queue="send-sms-tasks") + def test_post_notification_returns_401_and_well_formed_auth_error( + self, client, sample_template, sample_email_template, notification_type, key_send_to, send_to + ): + data = { + key_send_to: send_to, + "template_id": str(sample_template.id) if notification_type == "sms" else str(sample_email_template.id), + } + response = client.post( + path="/v2/notifications/{}".format(notification_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json")], + ) -def test_post_sms_notification_with_celery_persistence_returns_201_with_sms_sender_id( - notify_api, client, sample_template_with_placeholders, mocker -): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = 1 - sms_sender = create_service_sms_sender(service=sample_template_with_placeholders.service, sms_sender="123456") - mocked = mocker.patch("app.celery.tasks.save_sms.apply_async") - data = { - "phone_number": "+16502532222", - "template_id": str(sample_template_with_placeholders.id), - "personalisation": {" Name": "Jo"}, - "sms_sender_id": str(sms_sender.id), - } - auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) + assert response.status_code == 401 + assert response.headers["Content-type"] == "application/json" + error_resp = json.loads(response.get_data(as_text=True)) + assert error_resp["status_code"] == 401 + assert error_resp["errors"] == [ + { + "error": "AuthError", + "message": "Unauthorized, authentication token must be provided", + } + ] - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + @pytest.mark.parametrize( + "notification_type, key_send_to, send_to", + [ + ("sms", "phone_number", "+16502532222"), + ("email", "email_address", "sample@email.com"), + ], ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_sms_response) == resp_json - assert resp_json["content"]["from_number"] == sms_sender.sms_sender - assert mocked.called - - -def test_post_sms_notification_uses_sms_sender_id_reply_to(notify_api, client, sample_template_with_placeholders, mocker): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - sms_sender = create_service_sms_sender(service=sample_template_with_placeholders.service, sms_sender="6502532222") - mocked = mocker.patch("app.celery.provider_tasks.deliver_throttled_sms.apply_async") - data = { - "phone_number": "+16502532222", - "template_id": 
str(sample_template_with_placeholders.id), - "personalisation": {" Name": "Jo"}, - "sms_sender_id": str(sms_sender.id), - } - auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) + def test_missing_template_id_returns_400(self, client, sample_template, notification_type, key_send_to, send_to): + data = {key_send_to: send_to} + auth_header = create_authorization_header(service_id=sample_template.service_id) - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_sms_response) == resp_json - assert resp_json["content"]["from_number"] == "+16502532222" - notifications = Notification.query.all() - assert len(notifications) == 1 - assert notifications[0].reply_to_text == "+16502532222" - mocked.assert_called_once_with([resp_json["id"]], queue="send-throttled-sms-tasks") + response = client.post( + path="/v2/notifications/{}".format(notification_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 400 + assert response.headers["Content-type"] == "application/json" + error_resp = json.loads(response.get_data(as_text=True)) + assert error_resp["status_code"] == 400 + assert { + "error": "ValidationError", + "message": "template_id is a required property", + } in error_resp["errors"] + + @pytest.mark.parametrize( + "notification_type, missing_key", + [ + ("sms", "phone_number"), + ("email", "email_address"), + ], + ) + def test_missing_recipient_returns_400(self, client, sample_template, sample_email_template, notification_type, missing_key): + data = {"template_id": str(sample_template.id) if notification_type == "sms" else str(sample_email_template.id)} + auth_header = create_authorization_header(service_id=sample_template.service_id) -def test_notification_reply_to_text_is_original_value_if_sender_is_changed_after_post_notification( - notify_api, client, sample_template, mocker -): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - sms_sender = create_service_sms_sender(service=sample_template.service, sms_sender="123456", is_default=False) - mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - data = { - "phone_number": "+16502532222", - "template_id": str(sample_template.id), - "sms_sender_id": str(sms_sender.id), - } - auth_header = create_authorization_header(service_id=sample_template.service_id) + response = client.post( + path="/v2/notifications/{}".format(notification_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + assert response.status_code == 400 + assert response.headers["Content-type"] == "application/json" + error_resp = json.loads(response.get_data(as_text=True)) + assert error_resp["status_code"] == 400 + assert { + "error": "ValidationError", + "message": f"{missing_key} is a required property", + } in error_resp["errors"] + + @pytest.mark.parametrize( + "notification_type, key_send_to, send_to", + [ + ("sms", "phone_number", "+16502532222"), + ("email", "email_address", "sample@email.com"), + ], ) + def test_extra_field_returns_400( + self, client, sample_template, sample_email_template, notification_type, 
key_send_to, send_to + ): + data = { + key_send_to: send_to, + "template_id": str(sample_template.id) if notification_type == "sms" else str(sample_email_template.id), + "test_field": "not wanted", + } + auth_header = create_authorization_header(service_id=sample_template.service_id) + + response = client.post( + path="/v2/notifications/{}".format(notification_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) - dao_update_service_sms_sender( - service_id=sample_template.service_id, - service_sms_sender_id=sms_sender.id, - is_default=sms_sender.is_default, - sms_sender="updated", + assert response.status_code == 400 + assert response.headers["Content-type"] == "application/json" + error_resp = json.loads(response.get_data(as_text=True)) + assert error_resp["status_code"] == 400 + assert { + "error": "ValidationError", + "message": "Additional properties are not allowed (test_field was unexpected)", + } in error_resp["errors"] + + @pytest.mark.parametrize( + "notification_type, key_send_to, send_to", + [ + ("sms", "phone_number", "6502532222"), + ("email", "email_address", "sample@email.com"), + ], ) + def test_returns_a_429_limit_exceeded_if_rate_limit_exceeded( + self, notify_api, client, sample_service, mocker, notification_type, key_send_to, send_to + ): + sample = create_template(service=sample_service, template_type=notification_type) + save_mock = mocker.patch("app.v2.notifications.post_notifications.db_save_and_send_notification") + mocker.patch( + "app.v2.notifications.post_notifications.check_rate_limiting", + side_effect=RateLimitError("LIMIT", "INTERVAL", "TYPE"), + ) - assert response.status_code == 201 - notifications = Notification.query.all() - assert len(notifications) == 1 - assert notifications[0].reply_to_text == "123456" + data = {key_send_to: send_to, "template_id": str(sample.id)} + auth_header = create_authorization_header(service_id=sample.service_id) -@pytest.mark.parametrize( - "notification_type, key_send_to, send_to", - [ - ("sms", "phone_number", "+16502532222"), - ("email", "email_address", "sample@email.com"), - ], -) -def test_post_notification_returns_400_and_missing_template(client, sample_service, notification_type, key_send_to, send_to): + response = client.post( + path="/v2/notifications/{}".format(notification_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) - data = {key_send_to: send_to, "template_id": str(uuid.uuid4())} - auth_header = create_authorization_header(service_id=sample_service.id) + error = json.loads(response.data)["errors"][0]["error"] + message = json.loads(response.data)["errors"][0]["message"] + status_code = json.loads(response.data)["status_code"] + assert response.status_code == 429 + assert error == "RateLimitError" + assert message == "Exceeded rate limit for key type TYPE of LIMIT requests per INTERVAL seconds" + assert status_code == 429 - response = client.post( - path="/v2/notifications/{}".format(notification_type), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) + assert not save_mock.called - assert response.status_code == 400 - assert response.headers["Content-type"] == "application/json" + def test_post_sms_notification_returns_400_if_not_allowed_to_send_int_sms( + self, + client, + notify_db_session, + ): + service = create_service(service_permissions=[SMS_TYPE]) + template = create_template(service=service) - error_json = json.loads(response.get_data(as_text=True)) - assert 
error_json["status_code"] == 400 - assert error_json["errors"] == [{"error": "BadRequestError", "message": "Template not found"}] + data = {"phone_number": "+20-12-1234-1234", "template_id": template.id} + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) -@pytest.mark.parametrize( - "notification_type, key_send_to, send_to", - [ - ("sms", "phone_number", "+16502532222"), - ("email", "email_address", "sample@email.com"), - ( - "letter", - "personalisation", - {"address_line_1": "The queen", "postcode": "SW1 1AA"}, - ), - ], -) -def test_post_notification_returns_401_and_well_formed_auth_error( - client, sample_template, notification_type, key_send_to, send_to -): - data = {key_send_to: send_to, "template_id": str(sample_template.id)} + assert response.status_code == 400 + assert response.headers["Content-type"] == "application/json" - response = client.post( - path="/v2/notifications/{}".format(notification_type), - data=json.dumps(data), - headers=[("Content-Type", "application/json")], + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["status_code"] == 400 + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "Cannot send to international mobile numbers", + } + ] + + def test_post_sms_notification_with_archived_reply_to_id_returns_400(self, client, sample_template): + archived_sender = create_service_sms_sender(sample_template.service, "12345", is_default=False, archived=True) + data = { + "phone_number": "+16502532222", + "template_id": sample_template.id, + "sms_sender_id": archived_sender.id, + } + auth_header = create_authorization_header(service_id=sample_template.service_id) + response = client.post( + path="v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert ( + "sms_sender_id {} does not exist in database for service id {}".format(archived_sender.id, sample_template.service_id) + in resp_json["errors"][0]["message"] + ) + assert "BadRequestError" in resp_json["errors"][0]["error"] + + @pytest.mark.parametrize( + "recipient,label,permission_type, notification_type,expected_error", + [ + ("6502532222", "phone_number", "email", "sms", "text messages"), + ("someone@test.com", "email_address", "sms", "email", "emails"), + ], ) + def test_post_sms_notification_returns_400_if_not_allowed_to_send_notification( + self, + notify_db_session, + client, + recipient, + label, + permission_type, + notification_type, + expected_error, + ): + service = create_service(service_permissions=[permission_type]) + sample_template_without_permission = create_template(service=service, template_type=notification_type) + data = {label: recipient, "template_id": sample_template_without_permission.id} + auth_header = create_authorization_header(service_id=sample_template_without_permission.service.id) + + response = client.post( + path="/v2/notifications/{}".format(sample_template_without_permission.template_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + + assert response.status_code == 400 + assert response.headers["Content-type"] == "application/json" + + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["status_code"] == 400 + assert 
error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "Service is not allowed to send {}".format(expected_error), + } + ] + + @pytest.mark.parametrize("restricted", [True, False]) + def test_post_sms_notification_returns_400_if_number_not_safelisted(self, notify_db_session, client, restricted): + service = create_service(restricted=restricted, service_permissions=[SMS_TYPE, INTERNATIONAL_SMS_TYPE]) + template = create_template(service=service) + create_api_key(service=service, key_type="team") - assert response.status_code == 401 - assert response.headers["Content-type"] == "application/json" - error_resp = json.loads(response.get_data(as_text=True)) - assert error_resp["status_code"] == 401 - assert error_resp["errors"] == [ - { - "error": "AuthError", - "message": "Unauthorized, authentication token must be provided", + data = { + "phone_number": "+16132532235", + "template_id": template.id, } - ] + auth_header = create_authorization_header(service_id=service.id, key_type="team") + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) -@pytest.mark.parametrize( - "notification_type, key_send_to, send_to", - [ - ("sms", "phone_number", "+16502532222"), - ("email", "email_address", "sample@email.com"), - ], -) -def test_notification_returns_400_and_for_schema_problems(client, sample_template, notification_type, key_send_to, send_to): - data = {key_send_to: send_to, "template": str(sample_template.id)} - auth_header = create_authorization_header(service_id=sample_template.service_id) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["status_code"] == 400 + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": f"Can’t send to this recipient using a team-only API key (service {service.id}) " + f'- see {get_document_url("en", "keys.html#team-and-safelist")}', + } + ] - response = client.post( - path="/v2/notifications/{}".format(notification_type), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) + def test_post_notification_raises_bad_request_if_not_valid_notification_type(self, client, sample_service): + auth_header = create_authorization_header(service_id=sample_service.id) + response = client.post( + "/v2/notifications/foo", + data="{}", + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 404 + error_json = json.loads(response.get_data(as_text=True)) + assert "The requested URL was not found on the server." 
in error_json["message"] + + @pytest.mark.parametrize("notification_type", ["sms", "email"]) + def test_post_notification_with_wrong_type_of_sender( + self, client, sample_template, sample_email_template, notification_type, fake_uuid + ): + if notification_type == EMAIL_TYPE: + template = sample_email_template + form_label = "sms_sender_id" + data = { + "email_address": "test@test.com", + "template_id": str(sample_email_template.id), + form_label: fake_uuid, + } + elif notification_type == SMS_TYPE: + template = sample_template + form_label = "email_reply_to_id" + data = { + "phone_number": "+16502532222", + "template_id": str(template.id), + form_label: fake_uuid, + } + auth_header = create_authorization_header(service_id=template.service_id) - assert response.status_code == 400 - assert response.headers["Content-type"] == "application/json" - error_resp = json.loads(response.get_data(as_text=True)) - assert error_resp["status_code"] == 400 - assert { - "error": "ValidationError", - "message": "template_id is a required property", - } in error_resp["errors"] - assert { - "error": "ValidationError", - "message": "Additional properties are not allowed (template was unexpected)", - } in error_resp["errors"] - - -@pytest.mark.parametrize("reference", [None, "reference_from_client"]) -def test_post_email_notification_returns_201(notify_api, client, sample_email_template_with_placeholders, mocker, reference): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - mocked = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - data = { - "email_address": sample_email_template_with_placeholders.service.users[0].email_address, - "template_id": sample_email_template_with_placeholders.id, - "personalisation": {"name": "Bob"}, - } - if reference: - data.update({"reference": reference}) - auth_header = create_authorization_header(service_id=sample_email_template_with_placeholders.service_id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_email_response) == resp_json - notification = Notification.query.one() - assert notification.status == NOTIFICATION_CREATED - assert notification.postage is None - assert resp_json["id"] == str(notification.id) - assert resp_json["reference"] == reference - assert notification.reference is None - assert notification.reply_to_text is None - assert resp_json["content"]["body"] == sample_email_template_with_placeholders.content.replace("((name))", "Bob") - assert resp_json["content"]["subject"] == sample_email_template_with_placeholders.subject.replace("((name))", "Bob") - assert resp_json["content"]["from_email"] == "{}@{}".format( - sample_email_template_with_placeholders.service.email_from, - current_app.config["NOTIFY_EMAIL_DOMAIN"], - ) - assert "v2/notifications/{}".format(notification.id) in resp_json["uri"] - assert resp_json["template"]["id"] == str(sample_email_template_with_placeholders.id) - assert resp_json["template"]["version"] == sample_email_template_with_placeholders.version - assert ( - "services/{}/templates/{}".format( - str(sample_email_template_with_placeholders.service_id), - str(sample_email_template_with_placeholders.id), - ) - in resp_json["template"]["uri"] - ) - assert not resp_json["scheduled_for"] - assert mocked.called + response = client.post( + 
path="/v2/notifications/{}".format(notification_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert "Additional properties are not allowed ({} was unexpected)".format(form_label) in resp_json["errors"][0]["message"] + assert "ValidationError" in resp_json["errors"][0]["error"] + def test_post_email_notification_with_invalid_reply_to_id_returns_400(self, client, sample_email_template, mocker, fake_uuid): + mocker.patch("app.email_normal_publish.publish") + data = { + "email_address": sample_email_template.service.users[0].email_address, + "template_id": sample_email_template.id, + "email_reply_to_id": fake_uuid, + } + auth_header = create_authorization_header(service_id=sample_email_template.service_id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert ( + "email_reply_to_id {} does not exist in database for service id {}".format( + fake_uuid, sample_email_template.service_id + ) + in resp_json["errors"][0]["message"] + ) + assert "BadRequestError" in resp_json["errors"][0]["error"] + + def test_post_email_notification_with_archived_reply_to_id_returns_400(self, client, sample_email_template, mocker): + archived_reply_to = create_reply_to_email( + sample_email_template.service, + "reply_to@test.com", + is_default=False, + archived=True, + ) + mocker.patch("app.email_normal_publish.publish") + data = { + "email_address": "test@test.com", + "template_id": sample_email_template.id, + "email_reply_to_id": archived_reply_to.id, + } + auth_header = create_authorization_header(service_id=sample_email_template.service_id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert ( + "email_reply_to_id {} does not exist in database for service id {}".format( + archived_reply_to.id, sample_email_template.service_id + ) + in resp_json["errors"][0]["message"] + ) + assert "BadRequestError" in resp_json["errors"][0]["error"] -@pytest.mark.parametrize("reference", [None, "reference_from_client"]) -def test_post_email_notification_returns_201_with_celery_persistence( - notify_api, client, sample_email_template_with_placeholders, mocker, reference -): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = 1 - mocked = mocker.patch("app.celery.tasks.save_email.apply_async") - data = { - "email_address": sample_email_template_with_placeholders.service.users[0].email_address, - "template_id": sample_email_template_with_placeholders.id, - "personalisation": {"name": "Bob"}, - } - if reference: - data.update({"reference": reference}) - auth_header = create_authorization_header(service_id=sample_email_template_with_placeholders.service_id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_email_response) == resp_json - assert resp_json["reference"] == reference - assert resp_json["content"]["body"] == 
sample_email_template_with_placeholders.content.replace("((name))", "Bob") - assert resp_json["content"]["subject"] == sample_email_template_with_placeholders.subject.replace("((name))", "Bob") - assert resp_json["content"]["from_email"] == "{}@{}".format( - sample_email_template_with_placeholders.service.email_from, - current_app.config["NOTIFY_EMAIL_DOMAIN"], - ) - assert "v2/notifications/{}".format(resp_json["id"]) in resp_json["uri"] - assert resp_json["template"]["id"] == str(sample_email_template_with_placeholders.id) - assert resp_json["template"]["version"] == sample_email_template_with_placeholders.version - assert ( - "services/{}/templates/{}".format( - str(sample_email_template_with_placeholders.service_id), - str(sample_email_template_with_placeholders.id), - ) - in resp_json["template"]["uri"] + @pytest.mark.parametrize( + "personalisation_size, expected_success", + [ + (1024 * 50 + 100, False), + (1024 * 50 - 100, True), + ], ) - assert not resp_json["scheduled_for"] - assert mocked.called + def test_post_email_notification_with_personalisation_too_large( + self, notify_api, client, sample_email_template_with_placeholders, mocker, personalisation_size, expected_success + ): + mocked = mocker.patch("app.email_normal_publish.publish") + + data = { + "email_address": sample_email_template_with_placeholders.service.users[0].email_address, + "template_id": sample_email_template_with_placeholders.id, + "personalisation": {"name": random_sized_content(size=personalisation_size)}, + "reference": "reference_from_client", + } + + auth_header = create_authorization_header(service_id=sample_email_template_with_placeholders.service_id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + + if expected_success: + assert mocked.called + assert response.status_code == 201 + else: + resp_json = json.loads(response.get_data(as_text=True)) + assert not mocked.called + assert response.status_code == 400 + assert "ValidationError" in resp_json["errors"][0]["error"] + assert ( + f"Personalisation variables size of {personalisation_size} bytes is greater than allowed limit of 51200 bytes" + in resp_json["errors"][0]["message"] + ) + + def test_post_notification_returns_400_when_get_json_throws_exception(self, client, sample_email_template): + auth_header = create_authorization_header(service_id=sample_email_template.service_id) + response = client.post( + path="v2/notifications/email", + data="[", + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 400 + + def test_too_long_sms_returns_400(self, client, notify_db, notify_db_session): + service = create_service(sms_daily_limit=10, message_limit=100) + auth_header = create_authorization_header(service_id=service.id) + + max_size_template_content = ( + 612 - len(service.name) - 2 + ) # 612 is the max size of an sms, minus the service name that we append, minus 2 for the space and the colon which we append (i.e. 
"service name: ") + # create a template with content that is too long + template = create_sample_template( + notify_db, notify_db_session, service=service, template_type="sms", content="a" * (max_size_template_content + 1) + ) + + response = client.post( + path="/v2/notifications/sms", + data=json.dumps({"phone_number": "+16502532222", "template_id": template.id}), + headers=[("Content-Type", "application/json"), auth_header], + ) + + assert response.status_code == 400 + assert response.headers["Content-type"] == "application/json" + error_resp = json.loads(response.get_data(as_text=True)) + assert error_resp["status_code"] == 400 + assert "has a character count greater than" in str(response.data) @pytest.mark.parametrize( @@ -498,7 +783,7 @@ def test_post_email_notification_returns_201_with_celery_persistence( def test_should_not_persist_or_send_notification_if_simulated_recipient( client, recipient, notification_type, sample_email_template, sample_template, mocker ): - apply_async = mocker.patch("app.celery.provider_tasks.deliver_{}.apply_async".format(notification_type)) + mock_publish = mocker.patch("app.{}_normal_publish.publish".format(notification_type)) if notification_type == "sms": data = {"phone_number": recipient, "template_id": str(sample_template.id)} @@ -514,8 +799,8 @@ def test_should_not_persist_or_send_notification_if_simulated_recipient( ) assert response.status_code == 201 - apply_async.assert_not_called() assert json.loads(response.get_data(as_text=True))["id"] + mock_publish.assert_not_called() assert Notification.query.count() == 0 @@ -537,16 +822,13 @@ def test_send_notification_uses_appropriate_queue_according_to_template_process_ send_to, process_type, ): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - mocker.patch("app.celery.provider_tasks.deliver_{}.apply_async".format(notification_type)) + mock_publish = mocker.patch("app.{}_{}_publish.publish".format(notification_type, process_type)) sample = create_template( service=sample_service, template_type=notification_type, process_type=process_type, ) - mocked = mocker.patch("app.celery.provider_tasks.deliver_{}.apply_async".format(notification_type)) - data = {key_send_to: send_to, "template_id": str(sample.id)} auth_header = create_authorization_header(service_id=sample.service_id) @@ -557,1089 +839,1597 @@ def test_send_notification_uses_appropriate_queue_according_to_template_process_ headers=[("Content-Type", "application/json"), auth_header], ) - notification_id = json.loads(response.data)["id"] - assert response.status_code == 201 - mocked.assert_called_once_with([notification_id], queue=f"{process_type}-tasks") + mock_publish_args = mock_publish.call_args.args[0] + mock_publish_args_unsigned = signer_notification.verify(mock_publish_args) + assert mock_publish_args_unsigned["to"] == data[key_send_to] + + +class TestRestrictedServices: + @pytest.mark.parametrize( + "notification_type,to_key,to,response_code", + [ + ("sms", "phone_number", "+16132532235", 201), + ("email", "email_address", "test@example.com", 201), + ("sms", "phone_number", "+16132532230", 400), + ("email", "email_address", "bad@example.com", 400), + ], + ) + def test_team_keys_only_send_to_team_members( + self, notify_db_session, client, mocker, notify_api, notification_type, to_key, to, response_code + ): + service = create_service(restricted=True, service_permissions=[EMAIL_TYPE, SMS_TYPE, INTERNATIONAL_SMS_TYPE]) + user = create_user(mobile_number="+16132532235", email="test@example.com") + service.users = [user] + 
template = create_template(service=service, template_type=notification_type) + create_api_key(service=service, key_type="team") + redis_publish = mocker.patch(f"app.{notification_type}_normal_publish.publish") + data = { + to_key: to, + "template_id": template.id, + } + auth_header = create_authorization_header(service_id=service.id, key_type="team") + response = client.post( + path=f"/v2/notifications/{notification_type}", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) -@pytest.mark.parametrize( - "notification_type, key_send_to, send_to", - [ - ("sms", "phone_number", "6502532222"), - ("email", "email_address", "sample@email.com"), - ], -) -def test_returns_a_429_limit_exceeded_if_rate_limit_exceeded( - notify_api, client, sample_service, mocker, notification_type, key_send_to, send_to -): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - sample = create_template(service=sample_service, template_type=notification_type) - persist_mock = mocker.patch("app.v2.notifications.post_notifications.persist_notification") - deliver_mock = mocker.patch("app.v2.notifications.post_notifications.send_notification_to_queue") - mocker.patch( - "app.v2.notifications.post_notifications.check_rate_limiting", - side_effect=RateLimitError("LIMIT", "INTERVAL", "TYPE"), - ) - - data = {key_send_to: send_to, "template_id": str(sample.id)} - - auth_header = create_authorization_header(service_id=sample.service_id) - - response = client.post( - path="/v2/notifications/{}".format(notification_type), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - - error = json.loads(response.data)["errors"][0]["error"] - message = json.loads(response.data)["errors"][0]["message"] - status_code = json.loads(response.data)["status_code"] - assert response.status_code == 429 - assert error == "RateLimitError" - assert message == "Exceeded rate limit for key type TYPE of LIMIT requests per INTERVAL seconds" - assert status_code == 429 - - assert not persist_mock.called - assert not deliver_mock.called - - -def test_post_sms_notification_returns_400_if_not_allowed_to_send_int_sms( - client, - notify_db_session, -): - service = create_service(service_permissions=[SMS_TYPE]) - template = create_template(service=service) - - data = {"phone_number": "+20-12-1234-1234", "template_id": template.id} - auth_header = create_authorization_header(service_id=service.id) - - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + assert response.status_code == response_code + assert json.loads(response.get_data(as_text=True)) + if response_code == 201: + assert redis_publish.called + else: + assert redis_publish.called is False + + @pytest.mark.parametrize( + "notification_type,to_key,to_a, to_b,response_code", + [ + ("email", "email_address", "foo@example.com", "bar@example.com", 201), + ("sms", "phone_number", "+16132532231", "+16132532232", 201), + ("email", "email_address", "foo@example.com", "error@example.com", 400), + ("sms", "phone_number", "+16132532231", "+16132532233", 400), + ], ) + def test_team_keys_only_send_to_team_members_bulk_endpoint( + self, notify_db_session, client, mocker, notification_type, to_key, to_a, to_b, response_code + ): + service = create_service( + restricted=True, + service_permissions=[EMAIL_TYPE, SMS_TYPE], + ) + user_1 = create_user(mobile_number="+16132532231", email="foo@example.com") + user_2 = 
create_user(mobile_number="+16132532232", email="bar@example.com") + service.users = [user_1, user_2] + template = create_template(service=service, template_type=notification_type) + create_api_key(service=service, key_type="team") + job_id = str(uuid.uuid4()) + mocker.patch("app.v2.notifications.post_notifications.create_bulk_job", return_value=job_id) - assert response.status_code == 400 - assert response.headers["Content-type"] == "application/json" - - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["status_code"] == 400 - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "Cannot send to international mobile numbers", + data = { + "name": "job_name", + "template_id": template.id, + "csv": rows_to_csv([[to_key], [to_a], [to_b]]), } - ] - - -def test_post_sms_notification_with_archived_reply_to_id_returns_400(client, sample_template): - archived_sender = create_service_sms_sender(sample_template.service, "12345", is_default=False, archived=True) - data = { - "phone_number": "+16502532222", - "template_id": sample_template.id, - "sms_sender_id": archived_sender.id, - } - auth_header = create_authorization_header(service_id=sample_template.service_id) - response = client.post( - path="v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert ( - "sms_sender_id {} does not exist in database for service id {}".format(archived_sender.id, sample_template.service_id) - in resp_json["errors"][0]["message"] - ) - assert "BadRequestError" in resp_json["errors"][0]["error"] - + auth_header = create_authorization_header(service_id=service.id, key_type="team") + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == response_code + assert json.loads(response.get_data(as_text=True)) -@pytest.mark.parametrize( - "recipient,label,permission_type, notification_type,expected_error", - [ - ("6502532222", "phone_number", "email", "sms", "text messages"), - ("someone@test.com", "email_address", "sms", "email", "emails"), - ], -) -def test_post_sms_notification_returns_400_if_not_allowed_to_send_notification( - notify_db_session, - client, - recipient, - label, - permission_type, - notification_type, - expected_error, -): - service = create_service(service_permissions=[permission_type]) - sample_template_without_permission = create_template(service=service, template_type=notification_type) - data = {label: recipient, "template_id": sample_template_without_permission.id} - auth_header = create_authorization_header(service_id=sample_template_without_permission.service.id) - response = client.post( - path="/v2/notifications/{}".format(sample_template_without_permission.template_type), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], +class TestSchedulingSends: + @pytest.mark.parametrize( + "notification_type, key_send_to, send_to", + [ + ("sms", "phone_number", "6502532222"), + ("email", "email_address", "sample@email.com"), + ], ) - - assert response.status_code == 400 - assert response.headers["Content-type"] == "application/json" - - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["status_code"] == 400 - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "Service is not allowed 
to send {}".format(expected_error), + @freeze_time("2017-05-14 14:00:00") + def test_post_notification_with_scheduled_for(self, client, notify_db_session, notification_type, key_send_to, send_to): + service = create_service( + service_name=str(uuid.uuid4()), + service_permissions=[EMAIL_TYPE, SMS_TYPE, SCHEDULE_NOTIFICATIONS], + ) + template = create_template(service=service, template_type=notification_type) + data = { + key_send_to: send_to, + "template_id": str(template.id) if notification_type == EMAIL_TYPE else str(template.id), + "scheduled_for": "2017-05-14 14:15", } - ] - + auth_header = create_authorization_header(service_id=service.id) -@pytest.mark.parametrize("restricted", [True, False]) -def test_post_sms_notification_returns_400_if_number_not_safelisted(notify_db_session, client, restricted): - service = create_service(restricted=restricted, service_permissions=[SMS_TYPE, INTERNATIONAL_SMS_TYPE]) - template = create_template(service=service) - create_api_key(service=service, key_type="team") - - data = { - "phone_number": "+16132532235", - "template_id": template.id, - } - auth_header = create_authorization_header(service_id=service.id, key_type="team") - - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + response = client.post( + "/v2/notifications/{}".format(notification_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + resp_json = json.loads(response.get_data(as_text=True)) + scheduled_notification = ScheduledNotification.query.filter_by(notification_id=resp_json["id"]).all() + assert len(scheduled_notification) == 1 + assert resp_json["id"] == str(scheduled_notification[0].notification_id) + assert resp_json["scheduled_for"] == "2017-05-14 14:15" + + @pytest.mark.parametrize( + "notification_type, key_send_to, send_to", + [ + ("sms", "phone_number", "6502532222"), + ("email", "email_address", "sample@email.com"), + ], ) - - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["status_code"] == 400 - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "Can’t send to this recipient using a team-only API key " - f'- see {get_document_url("en", "keys.html#team-and-safelist")}', + @freeze_time("2017-05-14 14:00:00") + def test_post_notification_raises_bad_request_if_service_not_invited_to_schedule( + self, + client, + sample_template, + sample_email_template, + notification_type, + key_send_to, + send_to, + ): + data = { + key_send_to: send_to, + "template_id": str(sample_email_template.id) if notification_type == EMAIL_TYPE else str(sample_template.id), + "scheduled_for": "2017-05-14 14:15", } - ] - - -# TODO: duplicate -def test_post_sms_notification_returns_201_if_allowed_to_send_int_sms( - notify_api, - sample_service, - sample_template, - client, - mocker, -): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - - data = {"phone_number": "+20-12-1234-1234", "template_id": sample_template.id} - auth_header = create_authorization_header(service_id=sample_service.id) + auth_header = create_authorization_header(service_id=sample_template.service_id) - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - - assert 
response.status_code == 201 - assert response.headers["Content-type"] == "application/json" - - -# TODO: duplicate -def test_post_sms_notification_returns_201_if_allowed_to_send_int_sms_with_celery_persistence( - notify_api, - sample_service, - sample_template, - client, - mocker, -): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = 1 - mocker.patch("app.celery.tasks.save_sms.apply_async") + response = client.post( + "/v2/notifications/{}".format(notification_type), + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "Cannot schedule notifications (this feature is invite-only)", + } + ] - data = {"phone_number": "+20-12-1234-1234", "template_id": sample_template.id} - auth_header = create_authorization_header(service_id=sample_service.id) - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], +class TestSendingDocuments: + @pytest.mark.parametrize( + "filename, file_data, sending_method", + [ + ("good name.txt", "VGV4dCBjb250ZW50IGhlcmU=", "attach"), + ("good name.txt", "VGV4dCBjb250ZW50IGhlcmU=", "link"), + ], ) + def test_post_notification_with_document_upload( + self, notify_api, client, notify_db_session, mocker, filename, file_data, sending_method + ): + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + content = "See attached file." + if sending_method == "link": + content = "Document: ((document))" + template = create_template(service=service, template_type="email", content=content) + + statsd_mock = mocker.patch("app.v2.notifications.post_notifications.statsd_client") + mock_publish = mocker.patch("app.email_normal_publish.publish") + document_download_mock = mocker.patch("app.v2.notifications.post_notifications.document_download_client.upload_document") + document_response = document_download_response({"sending_method": sending_method, "mime_type": "text/plain"}) + document_download_mock.return_value = document_response + decoded_file = base64.b64decode(file_data) - assert response.status_code == 201 - assert response.headers["Content-type"] == "application/json" - - -def test_post_sms_should_persist_supplied_sms_number(notify_api, client, sample_template_with_placeholders, mocker): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - mocked = mocker.patch("app.celery.provider_tasks.deliver_sms.apply_async") - data = { - "phone_number": "+16502532222", - "template_id": str(sample_template_with_placeholders.id), - "personalisation": {" Name": "Jo"}, - } - - auth_header = create_authorization_header(service_id=sample_template_with_placeholders.service_id) - - response = client.post( - path="/v2/notifications/sms", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - notifications = Notification.query.all() - assert len(notifications) == 1 - notification_id = notifications[0].id - assert "+16502532222" == notifications[0].to - assert resp_json["id"] == str(notification_id) - assert mocked.called + data = { + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": { + "document": { + "file": file_data, + "filename": filename, + "sending_method": 
sending_method, + } + }, + } + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) -@pytest.mark.parametrize( - "notification_type, key_send_to, send_to", - [ - ("sms", "phone_number", "6502532222"), - ("email", "email_address", "sample@email.com"), - ], -) -@freeze_time("2017-05-14 14:00:00") -def test_post_notification_with_scheduled_for(client, notify_db_session, notification_type, key_send_to, send_to): - service = create_service( - service_name=str(uuid.uuid4()), - service_permissions=[EMAIL_TYPE, SMS_TYPE, SCHEDULE_NOTIFICATIONS], - ) - template = create_template(service=service, template_type=notification_type) - data = { - key_send_to: send_to, - "template_id": str(template.id) if notification_type == EMAIL_TYPE else str(template.id), - "scheduled_for": "2017-05-14 14:15", - } - auth_header = create_authorization_header(service_id=service.id) + assert response.status_code == 201, response.get_data(as_text=True) + resp_json = json.loads(response.get_data(as_text=True)) + assert validate(resp_json, post_email_response) == resp_json + document_download_mock.assert_called_once_with( + service.id, + {"file": decoded_file, "filename": filename, "sending_method": sending_method}, + ) - response = client.post( - "/v2/notifications/{}".format(notification_type), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + mock_publish_args = mock_publish.call_args.args[0] + mock_publish_args_unsigned = signer_notification.verify(mock_publish_args) + assert mock_publish_args_unsigned["to"] == data["email_address"] + assert mock_publish_args_unsigned["id"] == resp_json["id"] + + if sending_method == "link": + assert resp_json["content"]["body"] == f"Document: {document_response}" + else: + assert resp_json["content"]["body"] == "See attached file." + + assert statsd_mock.incr.call_args_list == [ + call("attachments.nb-attachments.count-1"), + call("attachments.nb-attachments", count=1), + call(f"attachments.services.{service.id}", count=1), + call(f"attachments.templates.{template.id}", count=1), + call(f"attachments.sending-method.{sending_method}"), + call("attachments.file-type.text/plain"), + call("attachments.file-size.0-1mb"), + ] + + @pytest.mark.parametrize( + "filename, sending_method, attachment_size, expected_success", + [ + ("attached_file.txt", "attach", 1024 * 1024 * 10 + 100, False), + ("linked_file.txt", "link", 1024 * 1024 * 10 + 100, False), + ("attached_file.txt", "attach", 1024 * 1024 * 10 - 100, True), + ("linked_file.txt", "link", 1024 * 1024 * 10 - 100, True), + ], ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - scheduled_notification = ScheduledNotification.query.filter_by(notification_id=resp_json["id"]).all() - assert len(scheduled_notification) == 1 - assert resp_json["id"] == str(scheduled_notification[0].notification_id) - assert resp_json["scheduled_for"] == "2017-05-14 14:15" + def test_post_notification_with_document_too_large( + self, notify_api, client, notify_db_session, mocker, filename, sending_method, attachment_size, expected_success + ): + mocked = mocker.patch("app.email_normal_publish.publish") + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + content = "See attached file." 
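        # Illustrative arithmetic (assumed 10 MiB limit, inferred only from the parametrized sizes above):
        #   limit_bytes = 1024 * 1024 * 10  # 10_485_760
        #   attachment_size = limit_bytes + 100 -> expected_success is False (ValidationError, nothing published)
        #   attachment_size = limit_bytes - 100 -> expected_success is True (201, email_normal_publish called)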
+ if sending_method == "link": + content = "Document: ((document))" + template = create_template(service=service, template_type="email", content=content) + mocker.patch("app.v2.notifications.post_notifications.statsd_client") + document_download_mock = mocker.patch("app.v2.notifications.post_notifications.document_download_client.upload_document") + document_response = document_download_response({"sending_method": sending_method, "mime_type": "text/plain"}) + document_download_mock.return_value = document_response -@pytest.mark.parametrize( - "notification_type, key_send_to, send_to", - [ - ("sms", "phone_number", "6502532222"), - ("email", "email_address", "sample@email.com"), - ], -) -@freeze_time("2017-05-14 14:00:00") -def test_post_notification_raises_bad_request_if_service_not_invited_to_schedule( - client, - sample_template, - sample_email_template, - notification_type, - key_send_to, - send_to, -): - data = { - key_send_to: send_to, - "template_id": str(sample_email_template.id) if notification_type == EMAIL_TYPE else str(sample_template.id), - "scheduled_for": "2017-05-14 14:15", - } - auth_header = create_authorization_header(service_id=sample_template.service_id) + file_data = random_sized_content(size=attachment_size) + encoded_file = base64.b64encode(file_data.encode()).decode() - response = client.post( - "/v2/notifications/{}".format(notification_type), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "Cannot schedule notifications (this feature is invite-only)", + data = { + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": { + "document": { + "file": encoded_file, + "filename": filename, + "sending_method": sending_method, + } + }, } - ] + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) -def test_post_notification_raises_bad_request_if_not_valid_notification_type(client, sample_service): - auth_header = create_authorization_header(service_id=sample_service.id) - response = client.post( - "/v2/notifications/foo", - data="{}", - headers=[("Content-Type", "application/json"), auth_header], + if expected_success: + assert mocked.called + assert response.status_code == 201 + else: + resp_json = json.loads(response.get_data(as_text=True)) + assert not mocked.called + assert response.status_code == 400 + assert "ValidationError" in resp_json["errors"][0]["error"] + assert filename in resp_json["errors"][0]["message"] + assert "and greater than allowed limit of" in resp_json["errors"][0]["message"] + + @pytest.mark.parametrize( + "sending_method, attachment_number, expected_success", + [ + ("attach", 9, True), + ("link", 9, True), + ("attach", 10, True), + ("link", 10, True), + ("attach", 11, False), + ("link", 11, False), + ], ) - assert response.status_code == 404 - error_json = json.loads(response.get_data(as_text=True)) - assert "The requested URL was not found on the server." 
in error_json["message"] - + def test_post_notification_with_too_many_documents( + self, notify_api, client, notify_db_session, mocker, sending_method, attachment_number, expected_success + ): + mocked = mocker.patch("app.email_normal_publish.publish") + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + template_content = "See attached file.\n" + if sending_method == "link": + for i in range(0, attachment_number): + template_content = template_content + f"Document: ((doc-{i}))\n" + template = create_template(service=service, template_type="email", content=template_content) + + mocker.patch("app.v2.notifications.post_notifications.statsd_client") + document_download_mock = mocker.patch("app.v2.notifications.post_notifications.document_download_client.upload_document") + document_response = document_download_response({"sending_method": sending_method, "mime_type": "text/plain"}) + document_download_mock.return_value = document_response + + documents = {} + for i in range(0, attachment_number): + file_data = random_sized_content() + encoded_file = base64.b64encode(file_data.encode()).decode() + documents[f"doc-{i}"] = { + "file": encoded_file, + "filename": f"doc-{i}", + "sending_method": sending_method, + } -@pytest.mark.parametrize("notification_type", ["sms", "email"]) -def test_post_notification_with_wrong_type_of_sender( - client, sample_template, sample_email_template, notification_type, fake_uuid -): - if notification_type == EMAIL_TYPE: - template = sample_email_template - form_label = "sms_sender_id" data = { - "email_address": "test@test.com", - "template_id": str(sample_email_template.id), - form_label: fake_uuid, + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": documents, } - elif notification_type == SMS_TYPE: - template = sample_template - form_label = "email_reply_to_id" - data = { - "phone_number": "+16502532222", - "template_id": str(template.id), - form_label: fake_uuid, - } - auth_header = create_authorization_header(service_id=template.service_id) - response = client.post( - path="/v2/notifications/{}".format(notification_type), - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert "Additional properties are not allowed ({} was unexpected)".format(form_label) in resp_json["errors"][0]["message"] - assert "ValidationError" in resp_json["errors"][0]["error"] - - -def test_post_email_notification_with_valid_reply_to_id_returns_201(notify_api, client, sample_email_template, mocker): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - reply_to_email = create_reply_to_email(sample_email_template.service, "test@test.com") - mocked = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - data = { - "email_address": sample_email_template.service.users[0].email_address, - "template_id": sample_email_template.id, - "email_reply_to_id": reply_to_email.id, - } - auth_header = create_authorization_header(service_id=sample_email_template.service_id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_email_response) == resp_json - notification = Notification.query.first() - assert notification.reply_to_text == 
"test@test.com" - assert resp_json["id"] == str(notification.id) - assert mocked.called - - assert notification.reply_to_text == reply_to_email.email_address - - -def test_post_email_notification_with_invalid_reply_to_id_returns_400(client, sample_email_template, mocker, fake_uuid): - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - data = { - "email_address": sample_email_template.service.users[0].email_address, - "template_id": sample_email_template.id, - "email_reply_to_id": fake_uuid, - } - auth_header = create_authorization_header(service_id=sample_email_template.service_id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert ( - "email_reply_to_id {} does not exist in database for service id {}".format(fake_uuid, sample_email_template.service_id) - in resp_json["errors"][0]["message"] - ) - assert "BadRequestError" in resp_json["errors"][0]["error"] - - -def test_post_email_notification_with_archived_reply_to_id_returns_400(client, sample_email_template, mocker): - archived_reply_to = create_reply_to_email( - sample_email_template.service, - "reply_to@test.com", - is_default=False, - archived=True, - ) - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - data = { - "email_address": "test@test.com", - "template_id": sample_email_template.id, - "email_reply_to_id": archived_reply_to.id, - } - auth_header = create_authorization_header(service_id=sample_email_template.service_id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert ( - "email_reply_to_id {} does not exist in database for service id {}".format( - archived_reply_to.id, sample_email_template.service_id + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], ) - in resp_json["errors"][0]["message"] - ) - assert "BadRequestError" in resp_json["errors"][0]["error"] - - -@pytest.mark.parametrize( - "filename, file_data, sending_method", - [ - ("good name.txt", "VGV4dCBjb250ZW50IGhlcmU=", "attach"), - ("good name.txt", "VGV4dCBjb250ZW50IGhlcmU=", "link"), - ], -) -def test_post_notification_with_document_upload( - notify_api, client, notify_db_session, mocker, filename, file_data, sending_method -): - notify_api.config["FF_NOTIFICATION_CELERY_PERSISTENCE"] = False - service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) - content = "See attached file." 
- if sending_method == "link": - content = "Document: ((document))" - template = create_template(service=service, template_type="email", content=content) - - statsd_mock = mocker.patch("app.v2.notifications.post_notifications.statsd_client") - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - document_download_mock = mocker.patch("app.v2.notifications.post_notifications.document_download_client.upload_document") - document_response = document_download_response({"sending_method": sending_method, "mime_type": "text/plain"}) - document_download_mock.return_value = document_response - decoded_file = base64.b64decode(file_data) - - data = { - "email_address": service.users[0].email_address, - "template_id": template.id, - "personalisation": { - "document": { - "file": file_data, - "filename": filename, - "sending_method": sending_method, - } - }, - } - - auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) - assert response.status_code == 201, response.get_data(as_text=True) - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_email_response) == resp_json - document_download_mock.assert_called_once_with( - service.id, - {"file": decoded_file, "filename": filename, "sending_method": sending_method}, + if expected_success: + assert mocked.called + assert response.status_code == 201 + else: + resp_json = json.loads(response.get_data(as_text=True)) + assert not mocked.called + assert response.status_code == 400 + assert "ValidationError" in resp_json["errors"][0]["error"] + assert ( + f"File number exceed allowed limits of 10 with number of {attachment_number}." + in resp_json["errors"][0]["message"] + ) + + @pytest.mark.parametrize( + "filename, file_data, sending_method", + [ + ("", "VGV4dCBjb250ZW50IGhlcmU=", "attach"), + ("1", "VGV4dCBjb250ZW50IGhlcmU=", "attach"), + ], ) + def test_post_notification_with_document_upload_bad_filename( + self, client, notify_db_session, filename, file_data, sending_method + ): + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + content = "See attached file." + template = create_template(service=service, template_type="email", content=content) + data = { + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": { + "document": { + "file": file_data, + "filename": filename, + "sending_method": sending_method, + } + }, + } - notification = Notification.query.one() - assert notification.status == NOTIFICATION_CREATED - assert notification.personalisation == {"document": document_response} - - if sending_method == "link": - assert resp_json["content"]["body"] == f"Document: {document_response}" - else: - assert resp_json["content"]["body"] == "See attached file." 
- - assert statsd_mock.incr.call_args_list == [ - call("attachments.nb-attachments.count-1"), - call("attachments.nb-attachments", count=1), - call(f"attachments.services.{service.id}", count=1), - call(f"attachments.templates.{template.id}", count=1), - call(f"attachments.sending-method.{sending_method}"), - call("attachments.file-type.text/plain"), - call("attachments.file-size.0-1mb"), - ] - - -@pytest.mark.parametrize( - "filename, file_data, sending_method", - [ - ("", "VGV4dCBjb250ZW50IGhlcmU=", "attach"), - ("1", "VGV4dCBjb250ZW50IGhlcmU=", "attach"), - ], -) -def test_post_notification_with_document_upload_bad_filename(client, notify_db_session, filename, file_data, sending_method): - service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) - content = "See attached file." - template = create_template(service=service, template_type="email", content=content) - data = { - "email_address": service.users[0].email_address, - "template_id": template.id, - "personalisation": { - "document": { - "file": file_data, - "filename": filename, - "sending_method": sending_method, - } - }, - } + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) - auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert "ValidationError" in resp_json["errors"][0]["error"] + assert filename in resp_json["errors"][0]["message"] + assert "too short" in resp_json["errors"][0]["message"] + + def test_post_notification_with_document_upload_long_filename( + self, + client, + notify_db_session, + ): + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + content = "See attached file." + template = create_template(service=service, template_type="email", content=content) + file_data = "VGV4dCBjb250ZW50IGhlcmU=" + filename = "a" * 256 + sending_method = "attach" - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert "ValidationError" in resp_json["errors"][0]["error"] - assert filename in resp_json["errors"][0]["message"] - assert "too short" in resp_json["errors"][0]["message"] + data = { + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": { + "document": { + "file": file_data, + "filename": filename, + "sending_method": sending_method, + } + }, + } + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) -def test_post_notification_with_document_upload_long_filename( - client, - notify_db_session, -): - service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) - content = "See attached file." 
- template = create_template(service=service, template_type="email", content=content) - file_data = "VGV4dCBjb250ZW50IGhlcmU=" - filename = "a" * 256 - sending_method = "attach" - - data = { - "email_address": service.users[0].email_address, - "template_id": template.id, - "personalisation": { - "document": { - "file": file_data, - "filename": filename, - "sending_method": sending_method, - } - }, - } + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert "ValidationError" in resp_json["errors"][0]["error"] + assert filename in resp_json["errors"][0]["message"] + assert "too long" in resp_json["errors"][0]["message"] - auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + @pytest.mark.parametrize( + "file_data, sending_method", + [ + ("VGV4dCBjb250ZW50IGhlcmU=", "attach"), + ], ) + def test_post_notification_with_document_upload_filename_required_check( + self, client, notify_db_session, file_data, sending_method + ): + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + content = "See attached file." + template = create_template(service=service, template_type="email", content=content) + data = { + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": {"document": {"file": file_data, "sending_method": sending_method}}, + } - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert "ValidationError" in resp_json["errors"][0]["error"] - assert filename in resp_json["errors"][0]["message"] - assert "too long" in resp_json["errors"][0]["message"] + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert "ValidationError" in resp_json["errors"][0]["error"] + assert "filename is a required property" in resp_json["errors"][0]["message"] -@pytest.mark.parametrize( - "file_data, sending_method", - [ - ("VGV4dCBjb250ZW50IGhlcmU=", "attach"), - ], -) -def test_post_notification_with_document_upload_filename_required_check(client, notify_db_session, file_data, sending_method): - service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) - content = "See attached file." - template = create_template(service=service, template_type="email", content=content) - data = { - "email_address": service.users[0].email_address, - "template_id": template.id, - "personalisation": {"document": {"file": file_data, "sending_method": sending_method}}, - } - - auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + @pytest.mark.parametrize( + "file_data", + [ + ("VGV4dCBjb250ZW50IGhlcmU="), + ], ) + def test_post_notification_with_document_upload_missing_sending_method( + self, + client, + notify_db_session, + file_data, + ): + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + content = "See attached file." 
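        # Rough shape of the document personalisation that this group of schema tests exercises,
        # inferred from the assertions in this class (filename roughly 1-255 characters,
        # sending_method one of "attach"/"link", file must be valid base64); the authoritative
        # JSON schema lives in the API code, not in this sketch:
        #   {"document": {"file": "<base64 content>", "filename": "<name>", "sending_method": "attach" | "link"}}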
+ template = create_template(service=service, template_type="email", content=content) + data = { + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": {"document": {"file": file_data}}, + } - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert "ValidationError" in resp_json["errors"][0]["error"] - assert "filename is a required property" in resp_json["errors"][0]["message"] + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert "ValidationError" in resp_json["errors"][0]["error"] + assert "sending_method is a required property" in resp_json["errors"][0]["message"] -@pytest.mark.parametrize( - "file_data", - [ - ("VGV4dCBjb250ZW50IGhlcmU="), - ], -) -def test_post_notification_with_document_upload_missing_sending_method( - client, - notify_db_session, - file_data, -): - service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) - content = "See attached file." - template = create_template(service=service, template_type="email", content=content) - data = { - "email_address": service.users[0].email_address, - "template_id": template.id, - "personalisation": {"document": {"file": file_data}}, - } - - auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + @pytest.mark.parametrize( + "file_data, sending_method, filename", + [ + ("VGV4dCBjb250ZW50IGhlcmU=", "attch", "1.txt"), + ], ) + def test_post_notification_with_document_upload_bad_sending_method( + self, client, notify_db_session, file_data, sending_method, filename + ): + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + content = "See attached file." + template = create_template(service=service, template_type="email", content=content) + data = { + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": { + "document": { + "file": file_data, + "filename": filename, + "sending_method": sending_method, + } + }, + } - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert "ValidationError" in resp_json["errors"][0]["error"] - assert "sending_method is a required property" in resp_json["errors"][0]["message"] - + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) -@pytest.mark.parametrize( - "file_data, sending_method, filename", - [ - ("VGV4dCBjb250ZW50IGhlcmU=", "attch", "1.txt"), - ], -) -def test_post_notification_with_document_upload_bad_sending_method( - client, notify_db_session, file_data, sending_method, filename -): - service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) - content = "See attached file." 
- template = create_template(service=service, template_type="email", content=content) - data = { - "email_address": service.users[0].email_address, - "template_id": template.id, - "personalisation": { - "document": { - "file": file_data, - "filename": filename, - "sending_method": sending_method, - } - }, - } + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert f"personalisation {sending_method} is not one of [attach, link]" in resp_json["errors"][0]["message"] - auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], + @pytest.mark.parametrize( + "file_data, message", + [ + ("abc", "Incorrect padding"), + ("🤡", "string argument should contain only ASCII characters"), + ], ) + def test_post_notification_with_document_upload_not_base64_file( + self, + client, + notify_db_session, + file_data, + message, + ): + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + content = "See attached file." + template = create_template(service=service, template_type="email", content=content) + data = { + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": { + "document": { + "file": file_data, + "sending_method": "attach", + "filename": "1.txt", + } + }, + } - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert f"personalisation {sending_method} is not one of [attach, link]" in resp_json["errors"][0]["message"] - - -@pytest.mark.parametrize( - "file_data, message", - [ - ("abc", "Incorrect padding"), - ("🤡", "string argument should contain only ASCII characters"), - ], -) -def test_post_notification_with_document_upload_not_base64_file( - client, - notify_db_session, - file_data, - message, -): - service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) - content = "See attached file." 
- template = create_template(service=service, template_type="email", content=content) - data = { - "email_address": service.users[0].email_address, - "template_id": template.id, - "personalisation": { - "document": { - "file": file_data, - "sending_method": "attach", - "filename": "1.txt", - } - }, - } + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) - auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) + assert response.status_code == 400 + resp_json = json.loads(response.get_data(as_text=True)) + assert f"{message} : Error decoding base64 field" in resp_json["errors"][0]["message"] - assert response.status_code == 400 - resp_json = json.loads(response.get_data(as_text=True)) - assert f"{message} : Error decoding base64 field" in resp_json["errors"][0]["message"] + def test_post_notification_with_document_upload_simulated(self, client, notify_db_session, mocker): + service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) + template = create_template(service=service, template_type="email", content="Document: ((document))") + mocker.patch("app.email_normal_publish.publish") + document_download_mock = mocker.patch("app.v2.notifications.post_notifications.document_download_client") + document_download_mock.get_upload_url.return_value = "https://document-url" -def test_post_notification_with_document_upload_simulated(client, notify_db_session, mocker): - service = create_service(service_permissions=[EMAIL_TYPE, UPLOAD_DOCUMENT]) - template = create_template(service=service, template_type="email", content="Document: ((document))") + data = { + "email_address": "simulate-delivered@notification.canada.ca", + "template_id": template.id, + "personalisation": {"document": {"file": "abababab", "sending_method": "link"}}, + } - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - document_download_mock = mocker.patch("app.v2.notifications.post_notifications.document_download_client") - document_download_mock.get_upload_url.return_value = "https://document-url" + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) - data = { - "email_address": "simulate-delivered@notification.canada.ca", - "template_id": template.id, - "personalisation": {"document": {"file": "abababab", "sending_method": "link"}}, - } + assert response.status_code == 201 + resp_json = json.loads(response.get_data(as_text=True)) + assert validate(resp_json, post_email_response) == resp_json - auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) + assert resp_json["content"]["body"] == "Document: https://document-url/test-document" - assert response.status_code == 201 - resp_json = json.loads(response.get_data(as_text=True)) - assert validate(resp_json, post_email_response) == resp_json + def test_post_notification_without_document_upload_permission(self, client, notify_db_session, mocker): + service = create_service(service_permissions=[EMAIL_TYPE]) + 
template = create_template(service=service, template_type="email", content="Document: ((document))") - assert resp_json["content"]["body"] == "Document: https://document-url/test-document" + mocker.patch("app.email_normal_publish.publish") + document_download_mock = mocker.patch("app.v2.notifications.post_notifications.document_download_client") + document_download_mock.upload_document.return_value = document_download_response() + data = { + "email_address": service.users[0].email_address, + "template_id": template.id, + "personalisation": {"document": {"file": "abababab"}}, + } -def test_post_notification_without_document_upload_permission(client, notify_db_session, mocker): - service = create_service(service_permissions=[EMAIL_TYPE]) - template = create_template(service=service, template_type="email", content="Document: ((document))") + auth_header = create_authorization_header(service_id=service.id) + response = client.post( + path="v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) - mocker.patch("app.celery.provider_tasks.deliver_email.apply_async") - document_download_mock = mocker.patch("app.v2.notifications.post_notifications.document_download_client") - document_download_mock.upload_document.return_value = document_download_response() + assert response.status_code == 400 - data = { - "email_address": service.users[0].email_address, - "template_id": template.id, - "personalisation": {"document": {"file": "abababab"}}, - } - auth_header = create_authorization_header(service_id=service.id) - response = client.post( - path="v2/notifications/email", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) +class TestSMSSendFragments: + def test_post_sms_enough_messages_left(self, notify_api, client, notify_db, notify_db_session, mocker): + mocker.patch("app.sms_normal_publish.publish") + service = create_service(sms_daily_limit=10, message_limit=100) + template = create_sample_template(notify_db, notify_db_session, content=500 * "a", service=service, template_type="sms") + data = { + "phone_number": "+16502532222", + "template_id": str(template.id), + "personalisation": {" Name": "Jo"}, + } + for x in range(6): + create_sample_notification(notify_db, notify_db_session, service=service) + auth_header = create_authorization_header(service_id=template.service_id) + + with set_config_values(notify_api, {"REDIS_ENABLED": True}): + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + + def test_post_sms_not_enough_messages_left(self, notify_api, client, notify_db, notify_db_session, mocker): + mocker.patch("app.sms_normal_publish.publish") + service = create_service(sms_daily_limit=10, message_limit=100) + template = create_sample_template(notify_db, notify_db_session, content=500 * "a", service=service, template_type="sms") + data = { + "phone_number": "+16502532222", + "template_id": str(template.id), + "personalisation": {" Name": "Jo"}, + } + for x in range(10): + create_sample_notification(notify_db, notify_db_session, service=service) + auth_header = create_authorization_header(service_id=template.service_id) + + with set_config_values(notify_api, {"REDIS_ENABLED": True}): + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 
429
+
+
+class TestSMSMessageCounter:
+    # Testing API one-off:
+    # - Sending using TEST, NORMAL, and TEAM API keys with a simulated phone number should not count towards limits
+    # TODO: update these params when we fix https://github.com/cds-snc/notification-planning/issues/855 and remove the xfail
+    @pytest.mark.parametrize(
+        "key_type", [KEY_TYPE_TEST, KEY_TYPE_NORMAL, pytest.param(KEY_TYPE_TEAM, marks=pytest.mark.xfail(raises=AssertionError))]
+    )
+    def test_API_ONEOFF_post_sms_with_test_key_does_not_count_towards_limits(
+        self, notify_api, client, notify_db, notify_db_session, mocker, key_type
+    ):
+        # test setup
+        mocker.patch("app.sms_normal_publish.publish")
+        increment_todays_requested_sms_count = mocker.patch("app.notifications.validators.increment_todays_requested_sms_count")
+
+        def __send_sms():
+            api_key = ApiKey(
+                service=service,
+                name="test_key",
+                created_by=template.created_by,
+                key_type=key_type,
+            )
+            save_model_api_key(api_key)
+            api_key_secret = get_unsigned_secret(api_key.id)
+            unsigned_secret = f"gcntfy-keyname-{api_key.service_id}-{api_key_secret}"
+
+            with set_config_values(notify_api, {"REDIS_ENABLED": True}):
+                response = client.post(
+                    path="/v2/notifications/sms",
+                    data=json.dumps(data),
+                    headers=[
+                        ("Content-Type", "application/json"),
+                        ("Authorization", f"ApiKey-v1 {unsigned_secret}"),
+                    ],
+                )
+            return response
+
+        # Create a service, Set limit to 10 fragments
+        service = create_service(sms_daily_limit=10, message_limit=100)
+        template = create_sample_template(notify_db, notify_db_session, content="Hello", service=service, template_type="sms")
+        data = {
+            "phone_number": "+16132532222",
+            "template_id": str(template.id),
+            "personalisation": {" Name": "Jo"},
+        }
+
+        response = __send_sms()
+
+        assert response.status_code == 201
+        assert not increment_todays_requested_sms_count.called
+
+    # Testing API BULK:
+    # - Sending using TEST API key with ALL simulated phone numbers should not count towards limits
+    # TODO: update these params when we fix https://github.com/cds-snc/notification-planning/issues/855 and remove the xfail
+    @pytest.mark.parametrize(
+        "key_type", [KEY_TYPE_TEST, KEY_TYPE_NORMAL, pytest.param(KEY_TYPE_TEAM, marks=pytest.mark.xfail(raises=AssertionError))]
+    )
+    def test_API_BULK_post_sms_with_test_key_does_not_count_towards_limits(
+        self, notify_api, client, notify_db, notify_db_session, mocker, key_type
+    ):
+        # test setup
+        mocker.patch("app.sms_normal_publish.publish")
+        mocker.patch("app.v2.notifications.post_notifications.create_bulk_job", return_value=str(uuid.uuid4()))
+        increment_todays_requested_sms_count = mocker.patch("app.notifications.validators.increment_todays_requested_sms_count")
+
+        def __send_sms():
+            api_key = ApiKey(
+                service=service,
+                name="test_key",
+                created_by=template.created_by,
+                key_type=key_type,
+            )
+            save_model_api_key(api_key)
+            api_key_secret = get_unsigned_secret(api_key.id)
+            unsigned_secret = f"gcntfy-keyname-{api_key.service_id}-{api_key_secret}"
+
+            with set_config_values(notify_api, {"REDIS_ENABLED": True}):
+                response = client.post(
+                    path="/v2/notifications/bulk",
+                    data=json.dumps(data),
+                    headers=[
+                        ("Content-Type", "application/json"),
+                        ("Authorization", f"ApiKey-v1 {unsigned_secret}"),
+                    ],
+                )
+            return response
+
+        # Create a service, Set limit to 10 fragments
+        service = create_service(sms_daily_limit=10, message_limit=100)
+        template = create_sample_template(notify_db, notify_db_session, content="Hello", service=service, template_type="sms")
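        # The bulk endpoint accepts recipients either as "rows" (a list of lists with a header
        # row, as in the data below) or as a pre-built "csv" string, e.g.
        # rows_to_csv([["phone number"], ["+16132532222"]]); supplying neither or both is
        # rejected, as TestBulkSend.test_post_bulk_with_invalid_data_arguments checks further
        # down. The numbers below are the simulated test numbers described in the comments
        # above, so increment_todays_requested_sms_count is expected to stay uncalled here.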
+ data = { + "name": "Bulk send name", + "template_id": str(template.id), + "rows": [["phone number"], ["+16132532222"], ["+16132532223"], ["+16132532224"]], + } + response = __send_sms() + + assert response.status_code == 201 + assert not increment_todays_requested_sms_count.called + + # Testing API BULK: + # - Throw an error if a user mixes testing and non-testing numbers with a LIVE or TEAM key + # - Allow mixing if its a TEST key + @pytest.mark.parametrize("key_type", [KEY_TYPE_TEST, KEY_TYPE_NORMAL, KEY_TYPE_TEAM]) + def test_API_BULK_post_sms_with_mixed_numbers(self, notify_api, client, notify_db, notify_db_session, mocker, key_type): + # test setup + mocker.patch("app.sms_normal_publish.publish") + mocker.patch("app.v2.notifications.post_notifications.create_bulk_job", return_value=str(uuid.uuid4())) + increment_todays_requested_sms_count = mocker.patch("app.notifications.validators.increment_todays_requested_sms_count") + + def __send_sms(): + api_key = ApiKey( + service=service, + name="test_key", + created_by=template.created_by, + key_type=key_type, + ) + save_model_api_key(api_key) + api_key_secret = get_unsigned_secret(api_key.id) + unsigned_secret = f"gcntfy-keyname-{api_key.service_id}-{api_key_secret}" + + with set_config_values(notify_api, {"REDIS_ENABLED": True}): + response = client.post( + path="/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + ("Authorization", f"ApiKey-v1 {unsigned_secret}"), + ], + ) + return response + + # Create a service, Set limit to 10 fragments + service = create_service(sms_daily_limit=10, message_limit=100) + template = create_sample_template(notify_db, notify_db_session, content="Hello", service=service, template_type="sms") + data = { + "name": "Bulk send name", + "template_id": str(template.id), + "rows": [["phone number"], ["+16132532222"], ["+16132532223"], ["+16135555555"]], + } -def test_post_notification_returns_400_when_get_json_throws_exception(client, sample_email_template): - auth_header = create_authorization_header(service_id=sample_email_template.service_id) - response = client.post( - path="v2/notifications/email", - data="[", - headers=[("Content-Type", "application/json"), auth_header], + response = __send_sms() + resp_json = json.loads(response.get_data(as_text=True)) + + # If the key is a test key, then the request should succeed + if key_type == KEY_TYPE_TEST: + assert response.status_code == 201 + assert not increment_todays_requested_sms_count.called + else: + assert resp_json["errors"][0]["error"] == "BadRequestError" + + # Testing ADMIN one-off: + # - Sending using TEST phone numbers (i.e. 
+16132532222) should not count towards limits
+    def test_ADMIN_ONEOFF_post_sms_with_test_phone_number_does_not_count_towards_limits(
+        self, notify_api, client, notify_db, notify_db_session, mocker
+    ):
+        # test setup
+        mocker.patch("app.sms_normal_publish.publish")
+        mocker.patch("app.service.send_notification.send_notification_to_queue")
+        increment_todays_requested_sms_count = mocker.patch("app.notifications.validators.increment_todays_requested_sms_count")
+
+        def __send_sms():
+            with set_config_values(notify_api, {"REDIS_ENABLED": True}):
+                token = create_jwt_token(
+                    current_app.config["ADMIN_CLIENT_SECRET"], client_id=current_app.config["ADMIN_CLIENT_USER_NAME"]
+                )
+                response = client.post(
+                    f"/service/{template.service_id}/send-notification",
+                    json={
+                        "to": "+16132532222",
+                        "template_id": str(template.id),
+                        "created_by": service.users[0].id,
+                        "personalisation": {"var": "var"},
+                    },
+                    headers={"Authorization": f"Bearer {token}"},
+                )
+            return response
+
+        # Create a service, template
+        service = create_service(sms_daily_limit=10, message_limit=100)
+        template = create_sample_template(notify_db, notify_db_session, content="a" * 400, service=service, template_type="sms")
+
+        response = __send_sms()
+
+        assert response.status_code == 201
+        assert not increment_todays_requested_sms_count.called
+
+    # Testing ADMIN CSV:
+    # - Sending using ALL TEST phone numbers (i.e. +16132532222) should succeed and not increment their daily usage
+    # - Sending using test+non-test phone numbers should fail
+    @pytest.mark.parametrize(
+        "expected_status_code, phone_numbers",
+        [
+            (201, "\r\n+16132532222\r\n+16132532222"),
+            (400, "\r\n+16132532222\r\n+15555555555"),
+        ],
+    )
+    def test_ADMIN_CSV_post_sms_with_test_phone_number_does_not_count_towards_limits(
+        self, notify_api, client, notify_db, notify_db_session, mocker, expected_status_code, phone_numbers
+    ):
+        # test setup
+        mocker.patch("app.sms_normal_publish.publish")
+        mocker.patch("app.service.send_notification.send_notification_to_queue")
+        mocker.patch("app.celery.tasks.process_job.apply_async")
+        mocker.patch(
+            "app.job.rest.get_job_from_s3",
+            return_value=f"phone number{phone_numbers}",
+        )
+        increment_todays_requested_sms_count = mocker.patch("app.notifications.validators.increment_todays_requested_sms_count")
+
+        def __send_sms():
+            with set_config_values(notify_api, {"REDIS_ENABLED": True}):
+                mocker.patch(
+                    "app.job.rest.get_job_metadata_from_s3",
+                    return_value={
+                        "template_id": str(template.id),
+                        "original_file_name": "thisisatest.csv",
+                        "notification_count": "1",
+                        "valid": "True",
+                    },
+                )
+
+                token = create_jwt_token(
+                    current_app.config["ADMIN_CLIENT_SECRET"], client_id=current_app.config["ADMIN_CLIENT_USER_NAME"]
+                )
+                response = client.post(
+                    f"/service/{template.service_id}/job",
+                    json={
+                        "id": str(uuid.uuid4()),
+                        "created_by": service.users[0].id,
+                    },
+                    headers={"Authorization": f"Bearer {token}"},
+                )
+            return response
+
+        # Create a service, template
+        service = create_service(sms_daily_limit=10, message_limit=100)
+        template = create_sample_template(notify_db, notify_db_session, content="Hello", service=service, template_type="sms")
+
+        response = __send_sms()  # 8/10 fragments
+        assert response.status_code == expected_status_code
+        assert not increment_todays_requested_sms_count.called
+
+
+class TestEmailsAndLimitsForSMSFragments:
+    # API
+    def test_API_ONEOFF_sends_warning_emails_and_blocks_sends(self, notify_api, client, notify_db, notify_db_session, mocker):
+        #
test setup + mocker.patch("app.sms_normal_publish.publish") + send_warning_email = mocker.patch("app.notifications.validators.send_near_sms_limit_email") + send_limit_reached_email = mocker.patch("app.notifications.validators.send_sms_limit_reached_email") + + def __send_sms(): + auth_header = create_authorization_header(service_id=template.service_id) + with set_config_values(notify_api, {"REDIS_ENABLED": True}): + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + return response + + # Create a service, Set limit to 10 fragments + service = create_service(sms_daily_limit=10, message_limit=100) + + # Create 7 notifications in the db + template = create_sample_template(notify_db, notify_db_session, content="Hello", service=service, template_type="sms") + data = { + "phone_number": "+16502532222", + "template_id": str(template.id), + "personalisation": {" Name": "Jo"}, + } + for x in range(7): + create_sample_notification(notify_db, notify_db_session, service=service) + + __send_sms() # send 8th fragment + assert send_warning_email.called + + __send_sms() # Send 9th fragment + __send_sms() # Send 10th fragment + assert send_limit_reached_email.called + + response = __send_sms() # send the 11th fragment + assert response.status_code == 429 # Ensure send is blocked + + def test_API_BULK_sends_warning_emails_and_blocks_sends(self, notify_api, client, notify_db, notify_db_session, mocker): + # test setup + mocker.patch("app.sms_normal_publish.publish") + mocker.patch("app.v2.notifications.post_notifications.create_bulk_job", return_value=str(uuid.uuid4())) + send_warning_email = mocker.patch("app.notifications.validators.send_near_sms_limit_email") + send_limit_reached_email = mocker.patch("app.notifications.validators.send_sms_limit_reached_email") + + def __send_sms(): + with set_config_values(notify_api, {"REDIS_ENABLED": True}): + data = { + "name": "job_name", + "template_id": str(template.id), + "rows": [["phone number"], ["9025551234"]], + } + + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=template.service_id), + ], + ) + return response + + # Create a service, Set limit to 10 fragments + service = create_service(sms_daily_limit=10, message_limit=100) + + # Create 7 notifications in the db + template = create_sample_template(notify_db, notify_db_session, content="Hello", service=service, template_type="sms") + for x in range(7): + create_sample_notification(notify_db, notify_db_session, service=service) + + __send_sms() # send 8th fragment + assert send_warning_email.called + + __send_sms() # Send 9th fragment + __send_sms() # Send 10th fragment + assert send_limit_reached_email.called + + response = __send_sms() # send the 11th fragment + assert response.status_code == 400 # Ensure send is blocked - not sure why we send a 400 here and a 429 everywhere else + + # ADMIN + def test_ADMIN_ONEOFF_sends_warning_emails_and_blocks_sends(self, notify_api, client, notify_db, notify_db_session, mocker): + # test setup + mocker.patch("app.sms_normal_publish.publish") + + mocker.patch("app.service.send_notification.send_notification_to_queue") + send_warning_email = mocker.patch("app.notifications.validators.send_near_sms_limit_email") + send_limit_reached_email = mocker.patch("app.notifications.validators.send_sms_limit_reached_email") + + def __send_sms(): + with 
set_config_values(notify_api, {"REDIS_ENABLED": True}): + token = create_jwt_token( + current_app.config["ADMIN_CLIENT_SECRET"], client_id=current_app.config["ADMIN_CLIENT_USER_NAME"] + ) + response = client.post( + f"/service/{template.service_id}/send-notification", + json={ + "to": "9025551234", + "template_id": str(template.id), + "created_by": service.users[0].id, + "personalisation": {"var": "var"}, + }, + headers={"Authorization": f"Bearer {token}"}, + ) + return response + + # Create a service, Set limit to 10 fragments + service = create_service(sms_daily_limit=10, message_limit=100) + + # Create 7 notifications in the db + template = create_sample_template(notify_db, notify_db_session, content="Hello", service=service, template_type="sms") + for x in range(7): + create_sample_notification(notify_db, notify_db_session, service=service) + + __send_sms() # 8/10 fragments used + assert send_warning_email.called + + __send_sms() # 9/10 fragments used + __send_sms() # 10/10 fragments used + assert send_limit_reached_email.called + + response = __send_sms() # 11/10 fragments + assert response.status_code == 429 # Ensure send is blocked + + def test_ADMIN_CSV_sends_warning_emails_and_blocks_sends(self, notify_api, client, notify_db, notify_db_session, mocker): + # test setup + mocker.patch("app.sms_normal_publish.publish") + mocker.patch("app.service.send_notification.send_notification_to_queue") + mocker.patch("app.celery.tasks.process_job.apply_async") + mocker.patch( + "app.job.rest.get_job_from_s3", + return_value="phone number\r\n6502532222", + ) + send_warning_email = mocker.patch("app.notifications.validators.send_near_sms_limit_email") + send_limit_reached_email = mocker.patch("app.notifications.validators.send_sms_limit_reached_email") + + def __send_sms(): + with set_config_values(notify_api, {"REDIS_ENABLED": True}): + mocker.patch( + "app.job.rest.get_job_metadata_from_s3", + return_value={ + "template_id": str(template.id), + "original_file_name": "thisisatest.csv", + "notification_count": "1", + "valid": "True", + }, + ) + + token = create_jwt_token( + current_app.config["ADMIN_CLIENT_SECRET"], client_id=current_app.config["ADMIN_CLIENT_USER_NAME"] + ) + response = client.post( + f"/service/{template.service_id}/job", + json={ + "id": str(uuid.uuid4()), + "created_by": service.users[0].id, + }, + headers={"Authorization": f"Bearer {token}"}, + ) + return response + + # Create a service, Set limit to 10 fragments + service = create_service(sms_daily_limit=10, message_limit=100) + + # Create 7 notifications in the db + template = create_sample_template(notify_db, notify_db_session, content="Hello", service=service, template_type="sms") + for x in range(7): + create_sample_notification(notify_db, notify_db_session, service=service) + + __send_sms() # 8/10 fragments + assert send_warning_email.called + + __send_sms() # 9/10 fragments + __send_sms() # 10/10 fragments + assert send_limit_reached_email.called + + response = __send_sms() # 11/10 fragments + assert response.status_code == 429 # Ensure send is blocked + + +class TestBulkSend: + @pytest.mark.parametrize("args", [{}, {"rows": [1, 2], "csv": "foo"}], ids=["no args", "both args"]) + def test_post_bulk_with_invalid_data_arguments( + self, + client, + sample_email_template, + args, + ): + data = {"name": "job_name", "template_id": str(sample_email_template.id)} | args -@pytest.mark.parametrize("args", [{}, {"rows": [1, 2], "csv": "foo"}], ids=["no args", "both args"]) -def test_post_bulk_with_invalid_data_arguments( - 
client, - sample_email_template, - args, -): - data = {"name": "job_name", "template_id": str(sample_email_template.id)} | args + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=sample_email_template.service_id), + ], + ) - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=sample_email_template.service_id)], - ) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "You should specify either rows or csv", + } + ] - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "You should specify either rows or csv", + def test_post_bulk_with_invalid_reply_to_id(self, client, sample_email_template): + data = { + "name": "job_name", + "template_id": str(sample_email_template.id), + "rows": [["email address"], ["bob@example.com"]], + "reply_to_id": "foo", } - ] - -def test_post_bulk_with_invalid_reply_to_id(client, sample_email_template): - data = { - "name": "job_name", - "template_id": str(sample_email_template.id), - "rows": [["email address"], ["bob@example.com"]], - "reply_to_id": "foo", - } + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=sample_email_template.service_id), + ], + ) - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=sample_email_template.service_id)], - ) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "ValidationError", + "message": "reply_to_id is not a valid UUID", + } + ] - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "ValidationError", - "message": "reply_to_id is not a valid UUID", + def test_post_bulk_with_non_existing_reply_to_id_for_email(self, client, sample_email_template, fake_uuid): + data = { + "name": "job_name", + "template_id": str(sample_email_template.id), + "rows": [["email address"], ["bob@example.com"]], + "reply_to_id": fake_uuid, } - ] - -def test_post_bulk_with_non_existing_reply_to_id_for_email(client, sample_email_template, fake_uuid): - data = { - "name": "job_name", - "template_id": str(sample_email_template.id), - "rows": [["email address"], ["bob@example.com"]], - "reply_to_id": fake_uuid, - } + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=sample_email_template.service_id), + ], + ) - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=sample_email_template.service_id)], - ) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": f"email_reply_to_id {fake_uuid} does not exist in database for 
service id {sample_email_template.service_id}", + } + ] - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": f"email_reply_to_id {fake_uuid} does not exist in database for service id {sample_email_template.service_id}", + def test_post_bulk_with_non_existing_reply_to_id_for_sms(self, client, sms_code_template, fake_uuid): + data = { + "name": "job_name", + "template_id": str(sms_code_template.id), + "rows": [["phone number", "verify_code"], ["bob@example.com", "123"]], + "reply_to_id": fake_uuid, } - ] + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), create_authorization_header(service_id=sms_code_template.service_id)], + ) + + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": f"sms_sender_id {fake_uuid} does not exist in database for service id {sms_code_template.service_id}", + } + ] -def test_post_bulk_with_non_existing_reply_to_id_for_sms(client, sms_code_template, fake_uuid): - data = { - "name": "job_name", - "template_id": str(sms_code_template.id), - "rows": [["phone number", "verify_code"], ["bob@example.com", "123"]], - "reply_to_id": fake_uuid, - } + def test_post_bulk_flags_if_name_is_missing(self, client, sample_email_template): + data = {"template_id": str(sample_email_template.id), "csv": "foo"} - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=sms_code_template.service_id)], + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=sample_email_template.service_id), + ], + ) + + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [{"error": "ValidationError", "message": "name is a required property"}] + + @pytest.mark.parametrize( + "scheduled_for, expected_message", + [ + (42, "scheduled_for 42 is not of type string, null"), + ( + "foo", + "scheduled_for datetime format is invalid. 
It must be a valid " + "ISO8601 date time format, " + "https://en.wikipedia.org/wiki/ISO_8601", + ), + ("2016-01-01T10:04:00", "scheduled_for datetime cannot be in the past"), + ("2016-01-05T10:06:00", "scheduled_for datetime can only be up to 96 hours in the future"), + ], ) + @freeze_time("2016-01-01 10:05:00") + def test_post_bulk_with_invalid_scheduled_for(self, client, sample_email_template, scheduled_for, expected_message): + data = {"name": "job_name", "template_id": str(sample_email_template.id), "scheduled_for": scheduled_for, "rows": [1, 2]} - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": f"sms_sender_id {fake_uuid} does not exist in database for service id {sms_code_template.service_id}", - } - ] + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=sample_email_template.service_id), + ], + ) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [{"error": "ValidationError", "message": expected_message}] -def test_post_bulk_flags_if_name_is_missing(client, sample_email_template): - data = {"template_id": str(sample_email_template.id), "csv": "foo"} + def test_post_bulk_with_non_existing_template(self, client, fake_uuid, sample_email_template): + data = {"name": "job_name", "template_id": fake_uuid, "rows": [1, 2]} - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=sample_email_template.service_id)], - ) + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=sample_email_template.service_id), + ], + ) - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [{"error": "ValidationError", "message": "name is a required property"}] + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [{"error": "BadRequestError", "message": "Template not found"}] + def test_post_bulk_with_archived_template(self, client, fake_uuid, notify_db, notify_db_session): + template = create_sample_template(notify_db, notify_db_session, archived=True) + data = {"name": "job_name", "template_id": template.id, "rows": [1, 2]} -@pytest.mark.parametrize( - "scheduled_for, expected_message", - [ - (42, "scheduled_for 42 is not of type string, null"), - ( - "foo", - "scheduled_for datetime format is invalid. 
It must be a valid " - "ISO8601 date time format, " - "https://en.wikipedia.org/wiki/ISO_8601", - ), - ("2016-01-01T10:04:00", "scheduled_for datetime cannot be in the past"), - ("2016-01-05T10:06:00", "scheduled_for datetime can only be up to 96 hours in the future"), - ], -) -@freeze_time("2016-01-01 10:05:00") -def test_post_bulk_with_invalid_scheduled_for(client, sample_email_template, scheduled_for, expected_message): - data = {"name": "job_name", "template_id": str(sample_email_template.id), "scheduled_for": scheduled_for, "rows": [1, 2]} + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], + ) - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=sample_email_template.service_id)], - ) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [{"error": "BadRequestError", "message": f"Template {template.id} has been deleted"}] - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [{"error": "ValidationError", "message": expected_message}] + @pytest.mark.parametrize( + "permission_type, notification_type, expected_error", + [ + ("email", "sms", "text messages"), + ("sms", "email", "emails"), + ], + ) + def test_post_bulk_returns_400_if_not_allowed_to_send_notification_type( + self, + notify_db_session, + client, + permission_type, + notification_type, + expected_error, + ): + service = create_service(service_permissions=[permission_type]) + sample_template_without_permission = create_template(service=service, template_type=notification_type) + data = {"name": "job_name", "template_id": sample_template_without_permission.id, "rows": [1, 2]} + auth_header = create_authorization_header(service_id=sample_template_without_permission.service.id) + response = client.post( + path="/v2/notifications/bulk", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) -def test_post_bulk_with_non_existing_template(client, fake_uuid, sample_email_template): - data = {"name": "job_name", "template_id": fake_uuid, "rows": [1, 2]} + assert response.status_code == 400 + assert response.headers["Content-type"] == "application/json" - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=sample_email_template.service_id)], + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["status_code"] == 400 + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": f"Service is not allowed to send {expected_error}", + } + ] + + @pytest.mark.parametrize("data_type", ["rows", "csv"]) + @pytest.mark.parametrize( + "template_type, content, row_header, expected_error", + [ + ("email", "Hello!", ["foo"], "email address"), + ("email", "Hello ((name))!", ["foo"], "email address, name"), + ("sms", "Hello ((name))!", ["foo"], "name, phone number"), + ("sms", "Hello ((name))!", ["foo"], "name, phone number"), + ("sms", "Hello ((name))!", ["name"], "phone number"), + ("sms", "Hello ((name))!", ["NAME"], "phone number"), + ], ) + def test_post_bulk_flags_missing_column_headers( + self, client, notify_db, notify_db_session, data_type, template_type, 
content, row_header, expected_error + ): + template = create_sample_template(notify_db, notify_db_session, content=content, template_type=template_type) + data = {"name": "job_name", "template_id": template.id} + rows = [row_header, ["bar"]] + if data_type == "csv": + data["csv"] = rows_to_csv(rows) + else: + data["rows"] = rows - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [{"error": "BadRequestError", "message": "Template not found"}] + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], + ) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [{"error": "BadRequestError", "message": f"Missing column headers: {expected_error}"}] + + @pytest.mark.parametrize( + "template_type, content, row_header, expected_error", + [ + ( + "email", + "Hello!", + ["email address", "email address"], + "email address", + ), + ( + "email", + "Hello ((name))!", + ["email address", "email_address", "name"], + "email address, email_address", + ), + ("sms", "Hello!", ["phone number", "phone number"], "phone number"), + ( + "sms", + "Hello!", + ["phone number", "phone_number"], + "phone number, phone_number", + ), + ( + "sms", + "Hello ((name))!", + ["phone number", "phone_number", "name"], + "phone number, phone_number", + ), + ], + ) + def test_post_bulk_flags_duplicate_recipient_column_headers( + self, + client, + notify_db, + notify_db_session, + template_type, + content, + row_header, + expected_error, + ): + template = create_sample_template(notify_db, notify_db_session, content=content, template_type=template_type) + data = {"name": "job_name", "template_id": template.id, "rows": [row_header, ["bar"]]} -def test_post_bulk_with_archived_template(client, fake_uuid, notify_db, notify_db_session): - template = sample_template(notify_db, notify_db_session, archived=True) - data = {"name": "job_name", "template_id": template.id, "rows": [1, 2]} + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], + ) - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], - ) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [{"error": "BadRequestError", "message": f"Duplicate column headers: {expected_error}"}] - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [{"error": "BadRequestError", "message": f"Template {template.id} has been deleted"}] + def test_post_bulk_flags_too_many_rows(self, client, sample_email_template, notify_api): + data = { + "name": "job_name", + "template_id": sample_email_template.id, + "csv": rows_to_csv([["email address"], ["foo@example.com"], ["bar@example.com"]]), + } + with set_config(notify_api, "CSV_MAX_ROWS", 1): + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=sample_email_template.service_id), + ], + ) + + assert response.status_code == 400 + error_json = 
json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "Too many rows. Maximum number of rows allowed is 1", + } + ] -@pytest.mark.parametrize( - "permission_type, notification_type, expected_error", - [ - ("email", "sms", "text messages"), - ("sms", "email", "emails"), - ], -) -def test_post_bulk_returns_400_if_not_allowed_to_send_notification_type( - notify_db_session, - client, - permission_type, - notification_type, - expected_error, -): - service = create_service(service_permissions=[permission_type]) - sample_template_without_permission = create_template(service=service, template_type=notification_type) - data = {"name": "job_name", "template_id": sample_template_without_permission.id, "rows": [1, 2]} - auth_header = create_authorization_header(service_id=sample_template_without_permission.service.id) + def test_post_bulk_flags_recipient_not_in_safelist_with_team_api_key(self, client, sample_email_template): + data = { + "name": "job_name", + "template_id": sample_email_template.id, + "csv": rows_to_csv([["email address"], ["foo@example.com"], ["bar@example.com"]]), + } - response = client.post( - path="/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), auth_header], - ) + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=sample_email_template.service_id, key_type="team"), + ], + ) - assert response.status_code == 400 - assert response.headers["Content-type"] == "application/json" + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "You cannot send to these recipients because you used a team and safelist API key.", + } + ] - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["status_code"] == 400 - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": f"Service is not allowed to send {expected_error}", + def test_post_bulk_flags_recipient_not_in_safelist_with_restricted_service(self, client, notify_db, notify_db_session): + service = create_service(restricted=True) + template = create_sample_template(notify_db, notify_db_session, service=service, template_type="email") + data = { + "name": "job_name", + "template_id": template.id, + "csv": rows_to_csv([["email address"], ["foo@example.com"], ["bar@example.com"]]), } - ] - -@pytest.mark.parametrize("data_type", ["rows", "csv"]) -@pytest.mark.parametrize( - "template_type, content, row_header, expected_error", - [ - ("email", "Hello!", ["foo"], "email address"), - ("email", "Hello ((name))!", ["foo"], "email address, name"), - ("sms", "Hello ((name))!", ["foo"], "name, phone number"), - ("sms", "Hello ((name))!", ["foo"], "name, phone number"), - ("sms", "Hello ((name))!", ["name"], "phone number"), - ("sms", "Hello ((name))!", ["NAME"], "phone number"), - ], -) -def test_post_bulk_flags_missing_column_headers( - client, notify_db, notify_db_session, data_type, template_type, content, row_header, expected_error -): - template = sample_template(notify_db, notify_db_session, content=content, template_type=template_type) - data = {"name": "job_name", "template_id": template.id} - rows = [row_header, ["bar"]] - if data_type == "csv": - data["csv"] = rows_to_csv(rows) - else: - data["rows"] = rows + response = client.post( + 
"/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=template.service_id, key_type="team"), + ], + ) - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], - ) + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "You cannot send to these recipients because your service is in trial mode. You can only send to members of your team and your safelist.", + } + ] - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [{"error": "BadRequestError", "message": f"Missing column headers: {expected_error}"}] + def test_post_bulk_flags_not_enough_remaining_messages(self, client, notify_db, notify_db_session, mocker): + service = create_service(message_limit=10) + template = create_sample_template(notify_db, notify_db_session, service=service, template_type="email") + messages_count_mock = mocker.patch( + "app.v2.notifications.post_notifications.fetch_todays_total_message_count", return_value=9 + ) + data = { + "name": "job_name", + "template_id": template.id, + "csv": rows_to_csv([["email address"], ["foo@example.com"], ["bar@example.com"]]), + } + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], + ) -@pytest.mark.parametrize( - "template_type, content, row_header, expected_error", - [ - ( - "email", - "Hello!", - ["email address", "email address"], - "email address", - ), - ( - "email", - "Hello ((name))!", - ["email address", "email_address", "name"], - "email address, email_address", - ), - ("sms", "Hello!", ["phone number", "phone number"], "phone number"), - ( - "sms", - "Hello!", - ["phone number", "phone_number"], - "phone number, phone_number", - ), - ( - "sms", - "Hello ((name))!", - ["phone number", "phone_number", "name"], - "phone number, phone_number", - ), - ], -) -def test_post_bulk_flags_duplicate_recipient_column_headers( - client, - notify_db, - notify_db_session, - template_type, - content, - row_header, - expected_error, -): - template = sample_template(notify_db, notify_db_session, content=content, template_type=template_type) - data = {"name": "job_name", "template_id": template.id, "rows": [row_header, ["bar"]]} + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "You only have 1 remaining messages before you reach your daily limit. 
You've tried to send 2 messages.", + } + ] + messages_count_mock.assert_called_once() + + def test_post_bulk_flags_not_enough_remaining_sms_messages(self, notify_api, client, notify_db, notify_db_session, mocker): + service = create_service(sms_daily_limit=10, message_limit=100) + template = create_sample_template(notify_db, notify_db_session, service=service, template_type="sms") + mocker.patch("app.v2.notifications.post_notifications.fetch_todays_total_message_count", return_value=9) + messages_count_mock = mocker.patch( + "app.v2.notifications.post_notifications.fetch_todays_requested_sms_count", return_value=9 + ) + data = { + "name": "job_name", + "template_id": template.id, + "csv": rows_to_csv([["phone number"], ["6135551234"], ["6135551234"]]), + } - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], - ) + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], + ) - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [{"error": "BadRequestError", "message": f"Duplicate column headers: {expected_error}"}] + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "You only have 1 remaining sms messages before you reach your daily limit. You've tried to send 2 sms messages.", + } + ] + messages_count_mock.assert_called_once() + + @pytest.mark.parametrize("data_type", ["rows", "csv"]) + def test_post_bulk_flags_rows_with_errors(self, client, notify_db, notify_db_session, data_type): + template = create_sample_template(notify_db, notify_db_session, template_type="email", content="Hello ((name))") + data = {"name": "job_name", "template_id": template.id} + rows = [ + ["email address", "name"], + ["foo@example.com", "Foo"], + ["bar@example.com"], + ["nope", "nope"], + ["baz@example.com", ""], + ["baz@example.com", " "], + ] + if data_type == "csv": + data["csv"] = rows_to_csv(rows) + else: + data["rows"] = rows + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], + ) -def test_post_bulk_flags_too_many_rows(client, sample_email_template, notify_api): - data = { - "name": "job_name", - "template_id": sample_email_template.id, - "csv": rows_to_csv([["email address"], ["foo@example.com"], ["bar@example.com"]]), - } + assert response.status_code == 400 + error_json = json.loads(response.get_data(as_text=True)) + assert error_json["errors"] == [ + { + "error": "BadRequestError", + "message": "Some rows have errors. Row 1 - `name`: Missing. Row 2 - `email address`: invalid recipient. Row 3 - `name`: Missing. 
Row 4 - `name`: Missing.", + } + ] + + @pytest.mark.parametrize("data_type", ["rows", "csv"]) + @pytest.mark.parametrize("is_scheduled", [True, False]) + @pytest.mark.parametrize("use_sender_id", [True, False]) + @pytest.mark.parametrize("has_default_reply_to", [True, False]) + def test_post_bulk_creates_job_and_dispatches_celery_task( + self, + client, + sample_email_template, + mocker, + notify_user, + notify_api, + data_type, + is_scheduled, + use_sender_id, + has_default_reply_to, + ): + data = {"name": "job_name", "template_id": sample_email_template.id} + rows = [["email address"], ["foo@example.com"]] + if data_type == "csv": + data["csv"] = rows_to_csv(rows) + else: + data["rows"] = rows + + if is_scheduled: + scheduled_for = datetime.utcnow() + timedelta(days=1) + data["scheduled_for"] = scheduled_for.isoformat() + if has_default_reply_to: + create_reply_to_email(sample_email_template.service, "test@test.com") + if use_sender_id: + reply_to_email = create_reply_to_email(sample_email_template.service, "custom@test.com", is_default=False) + data["reply_to_id"] = reply_to_email.id + + api_key = create_api_key(service=sample_email_template.service) + job_id = str(uuid.uuid4()) + upload_to_s3 = mocker.patch("app.v2.notifications.post_notifications.upload_job_to_s3", return_value=job_id) + process_job = mocker.patch("app.v2.notifications.post_notifications.process_job.apply_async") - with set_config(notify_api, "CSV_MAX_ROWS", 1): response = client.post( "/v2/notifications/bulk", data=json.dumps(data), @@ -1649,212 +2439,233 @@ def test_post_bulk_flags_too_many_rows(client, sample_email_template, notify_api ], ) - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "Too many rows. 
Maximum number of rows allowed is 1", + upload_to_s3.assert_called_once_with(sample_email_template.service_id, "email address\r\nfoo@example.com") + if not is_scheduled: + process_job.assert_called_once_with([str(job_id)], queue="job-tasks") + else: + process_job.assert_not_called() + + job = dao_get_job_by_id(job_id) + assert str(job.id) == job_id + assert job.service_id == sample_email_template.service_id + assert job.template_id == sample_email_template.id + assert job.notification_count == 1 + assert job.template_version == sample_email_template.version + assert job.job_status == "scheduled" if is_scheduled else "pending" + assert job.original_file_name == "job_name" + if is_scheduled: + assert job.scheduled_for == scheduled_for + else: + assert job.scheduled_for is None + assert job.api_key_id == api_key.id + if use_sender_id: + assert job.sender_id == reply_to_email.id + else: + assert job.sender_id is None + + assert response.status_code == 201 + + assert json.loads(response.get_data(as_text=True)) == { + "data": { + "api_key": { + "id": str(api_key.id), + "key_type": "normal", + "name": api_key.name, + }, + "archived": False, + "created_at": f"{job.created_at.isoformat()}+00:00", + "created_by": {"id": str(notify_user.id), "name": notify_user.name}, + "id": job_id, + "job_status": "scheduled" if is_scheduled else "pending", + "notification_count": 1, + "original_file_name": "job_name", + "processing_finished": None, + "processing_started": None, + "scheduled_for": f"{scheduled_for.isoformat()}+00:00" if is_scheduled else None, + "service": str(sample_email_template.service_id), + "service_name": {"name": sample_email_template.service.name}, + "template": str(sample_email_template.id), + "template_version": sample_email_template.version, + "updated_at": None, + "sender_id": str(reply_to_email.id) if use_sender_id else None, + } } - ] + def test_post_bulk_sms_sets_sender_id_from_database( + self, + client, + mocker, + notify_user, + notify_api, + ): + service = create_service_with_inbound_number(inbound_number="12345") + template = create_template(service=service) + sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first() + data = {"name": "job_name", "template_id": template.id, "rows": [["phone number"], ["6135550111"]]} + job_id = str(uuid.uuid4()) + mocker.patch("app.v2.notifications.post_notifications.upload_job_to_s3", return_value=job_id) + mocker.patch("app.v2.notifications.post_notifications.process_job.apply_async") + + client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=service.id), + ], + ) -def test_post_bulk_flags_recipient_not_in_safelist_with_team_api_key(client, sample_email_template): - data = { - "name": "job_name", - "template_id": sample_email_template.id, - "csv": rows_to_csv([["email address"], ["foo@example.com"], ["bar@example.com"]]), - } + job = dao_get_job_by_id(job_id) + assert job.sender_id == sms_sender.id - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[ - ("Content-Type", "application/json"), - create_authorization_header(service_id=sample_email_template.service_id, key_type="team"), - ], - ) + def test_post_bulk_with_too_large_sms_fails(self, client, notify_db, notify_db_session, mocker): + mocker.patch("app.sms_normal_publish.publish") + mocker.patch("app.v2.notifications.post_notifications.create_bulk_job", return_value=str(uuid.uuid4())) - assert response.status_code == 400 - 
error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "You cannot send to these recipients because you used a team and safelist API key.", + service = create_service(sms_daily_limit=10, message_limit=100) + template = create_sample_template(notify_db, notify_db_session, service=service, template_type="sms", content="a" * 613) + data = { + "name": "job_name", + "template_id": template.id, + "csv": rows_to_csv([["phone number"], ["+16502532222"]]), } - ] - -def test_post_bulk_flags_recipient_not_in_safelist_with_restricted_service(client, notify_db, notify_db_session): - service = create_service(restricted=True) - template = sample_template(notify_db, notify_db_session, service=service, template_type="email") - data = { - "name": "job_name", - "template_id": template.id, - "csv": rows_to_csv([["email address"], ["foo@example.com"], ["bar@example.com"]]), - } - - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[ - ("Content-Type", "application/json"), - create_authorization_header(service_id=template.service_id, key_type="team"), + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=service.id), + ], + ) + assert response.status_code == 400 + assert "has a character count greater than" in str(response.data) + + @pytest.mark.parametrize( + "row_data, failure_row", + [ + ([["phone number", "Name"], ["+16502532222", "a" * 612]], 1), + ([["phone number", "Name"], ["+16502532222", "a"], ["+16502532222", "a" * 612]], 2), ], ) + def test_post_bulk_with_too_large_sms_fail_and_shows_correct_row( + self, client, notify_db, notify_db_session, mocker, row_data, failure_row + ): + mocker.patch("app.sms_normal_publish.publish") + mocker.patch("app.v2.notifications.post_notifications.create_bulk_job", return_value=str(uuid.uuid4())) - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "You cannot send to these recipients because your service is in trial mode. 
You can only send to members of your team and your safelist.", + service = create_service(sms_daily_limit=10, message_limit=100) + template = create_sample_template( + notify_db, notify_db_session, service=service, template_type="sms", content="Hello (( Name))\nYour thing is due soon" + ) + data = { + "name": "job_name", + "template_id": template.id, + "csv": rows_to_csv(row_data), } - ] + response = client.post( + "/v2/notifications/bulk", + data=json.dumps(data), + headers=[ + ("Content-Type", "application/json"), + create_authorization_header(service_id=service.id), + ], + ) + assert response.status_code == 400 + assert "has a character count greater than" in str(response.data) + assert "Row {}".format(failure_row) in str(response.data) -def test_post_bulk_flags_not_enough_remaining_messages(client, notify_db, notify_db_session, mocker): - service = create_service(message_limit=10) - template = sample_template(notify_db, notify_db_session, service=service, template_type="email") - messages_count_mock = mocker.patch("app.v2.notifications.post_notifications.fetch_todays_total_message_count", return_value=9) - data = { - "name": "job_name", - "template_id": template.id, - "csv": rows_to_csv([["email address"], ["foo@example.com"], ["bar@example.com"]]), - } - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], - ) +class TestBatchPriorityLanes: + @pytest.mark.parametrize("process_type", ["bulk", "normal", "priority"]) + def test_sms_each_queue_is_used(self, notify_api, client, service_factory, mocker, process_type): + mock_redisQueue_SMS_BULK = mocker.patch("app.sms_bulk_publish.publish") + mock_redisQueue_SMS_NORMAL = mocker.patch("app.sms_normal_publish.publish") + mock_redisQueue_SMS_PRIORITY = mocker.patch("app.sms_priority_publish.publish") - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "You only have 1 remaining messages before you reach your daily limit. 
You've tried to send 2 messages.", + service = service_factory.get("one") + template = create_template(service=service, content="Hello (( Name))\nYour thing is due soon", process_type=process_type) + + data = { + "phone_number": "+16502532222", + "template_id": str(template.id), + "personalisation": {" Name": "Jo"}, } - ] - messages_count_mock.assert_called_once() - - -@pytest.mark.parametrize("data_type", ["rows", "csv"]) -def test_post_bulk_flags_rows_with_errors(client, notify_db, notify_db_session, data_type): - template = sample_template(notify_db, notify_db_session, template_type="email", content="Hello ((name))") - data = {"name": "job_name", "template_id": template.id} - rows = [ - ["email address", "name"], - ["foo@example.com", "Foo"], - ["bar@example.com"], - ["nope", "nope"], - ["baz@example.com", ""], - ["baz@example.com", " "], - ] - if data_type == "csv": - data["csv"] = rows_to_csv(rows) - else: - data["rows"] = rows - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=template.service_id)], - ) + auth_header = create_authorization_header(service_id=template.service_id) - assert response.status_code == 400 - error_json = json.loads(response.get_data(as_text=True)) - assert error_json["errors"] == [ - { - "error": "BadRequestError", - "message": "Some rows have errors. Row 1 - `name`: Missing. Row 2 - `email address`: invalid recipient. Row 3 - `name`: Missing. Row 4 - `name`: Missing.", - } - ] + response = client.post( + path="/v2/notifications/sms", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + + if process_type == "bulk": + assert mock_redisQueue_SMS_BULK.called + elif process_type == "normal": + assert mock_redisQueue_SMS_NORMAL.called + elif process_type == "priority": + assert mock_redisQueue_SMS_PRIORITY.called + + @pytest.mark.parametrize("process_type", ["bulk", "normal", "priority"]) + def test_email_each_queue_is_used(self, notify_api, client, mocker, service_factory, process_type): + mock_redisQueue_EMAIL_BULK = mocker.patch("app.email_bulk_publish.publish") + mock_redisQueue_EMAIL_NORMAL = mocker.patch("app.email_normal_publish.publish") + mock_redisQueue_EMAIL_PRIORITY = mocker.patch("app.email_priority_publish.publish") + + service = service_factory.get("one") + template = create_template( + service=service, template_type="email", content="Hello (( Name))\nYour thing is due soon", process_type=process_type + ) + data = { + "email_address": template.service.users[0].email_address, + "template_id": str(template.id), + "personalisation": {"name": "Jo"}, + } -@pytest.mark.parametrize("data_type", ["rows", "csv"]) -@pytest.mark.parametrize("is_scheduled", [True, False]) -@pytest.mark.parametrize("use_sender_id", [True, False]) -@pytest.mark.parametrize("has_default_reply_to", [True, False]) -def test_post_bulk_creates_job_and_dispatches_celery_task( - client, sample_email_template, mocker, notify_user, notify_api, data_type, is_scheduled, use_sender_id, has_default_reply_to -): - data = {"name": "job_name", "template_id": sample_email_template.id} - rows = [["email address"], ["foo@example.com"]] - if data_type == "csv": - data["csv"] = rows_to_csv(rows) - else: - data["rows"] = rows - - if is_scheduled: - scheduled_for = datetime.utcnow() + timedelta(days=1) - data["scheduled_for"] = scheduled_for.isoformat() - if has_default_reply_to: - 
create_reply_to_email(sample_email_template.service, "test@test.com") - if use_sender_id: - reply_to_email = create_reply_to_email(sample_email_template.service, "custom@test.com", is_default=False) - data["reply_to_id"] = reply_to_email.id - - api_key = create_api_key(service=sample_email_template.service) - job_id = str(uuid.uuid4()) - upload_to_s3 = mocker.patch("app.v2.notifications.post_notifications.upload_job_to_s3", return_value=job_id) - process_job = mocker.patch("app.v2.notifications.post_notifications.process_job.apply_async") + auth_header = create_authorization_header(service_id=template.service_id) - response = client.post( - "/v2/notifications/bulk", - data=json.dumps(data), - headers=[("Content-Type", "application/json"), create_authorization_header(service_id=sample_email_template.service_id)], + response = client.post( + path="/v2/notifications/email", + data=json.dumps(data), + headers=[("Content-Type", "application/json"), auth_header], + ) + assert response.status_code == 201 + # pytest.set_trace() + if process_type == "bulk": + assert mock_redisQueue_EMAIL_BULK.called + elif process_type == "normal": + assert mock_redisQueue_EMAIL_NORMAL.called + elif process_type == "priority": + assert mock_redisQueue_EMAIL_PRIORITY.called + + +class TestSeedingBounceRateData: + @freeze_time("2019-01-01 12:00:00.000000") + @pytest.mark.parametrize( + "epoch_time, redis_result, result", + [ + ("2019-01-01 15:00:00.000000", True, False), + ("2019-01-01 00:00:00.000000", False, True), + ("2018-12-31 12:00:00.000000", False, True), + ("2018-12-31 12:00:00.000000", False, False), + ("2019-01-01 00:00:00.000000", True, False), + ], ) + def test_seed_bounce_rate(self, notify_api, sample_email_template, mocker, epoch_time, redis_result, result): + service_id = str(sample_email_template.service_id) + mocker.patch("app.v2.notifications.post_notifications.seed_bounce_rate_in_redis.apply_async") + mocker.patch("app.v2.notifications.post_notifications.redis_store.get", return_value=redis_result) + # Convert string to datetime object + date_object = datetime.strptime(epoch_time, "%Y-%m-%d %H:%M:%S.%f") - upload_to_s3.assert_called_once_with(sample_email_template.service_id, "email address\r\nfoo@example.com") - if not is_scheduled: - process_job.assert_called_once_with([str(job_id)], queue="job-tasks") - else: - process_job.assert_not_called() - - job = dao_get_job_by_id(job_id) - assert str(job.id) == job_id - assert job.service_id == sample_email_template.service_id - assert job.template_id == sample_email_template.id - assert job.notification_count == 1 - assert job.template_version == sample_email_template.version - assert job.job_status == "scheduled" if is_scheduled else "pending" - assert job.original_file_name == "job_name" - if is_scheduled: - assert job.scheduled_for == scheduled_for - else: - assert job.scheduled_for is None - assert job.api_key_id == api_key.id - if use_sender_id: - assert job.sender_id == reply_to_email.id - else: - assert job.sender_id is None + # Convert datetime object to epoch timestamp in milliseconds + epoch_timestamp_s = int(date_object.timestamp()) - assert response.status_code == 201 + _seed_bounce_data(epoch_timestamp_s, service_id) - assert json.loads(response.get_data(as_text=True)) == { - "data": { - "api_key": { - "id": str(api_key.id), - "key_type": "normal", - "name": api_key.name, - }, - "archived": False, - "created_at": f"{job.created_at.isoformat()}+00:00", - "created_by": {"id": str(notify_user.id), "name": notify_user.name}, - "id": job_id, - 
"job_status": "scheduled" if is_scheduled else "pending", - "notification_count": 1, - "original_file_name": "job_name", - "processing_finished": None, - "processing_started": None, - "scheduled_for": f"{scheduled_for.isoformat()}+00:00" if is_scheduled else None, - "service": str(sample_email_template.service_id), - "service_name": {"name": sample_email_template.service.name}, - "template": str(sample_email_template.id), - "template_version": sample_email_template.version, - "updated_at": None, - "sender_id": str(reply_to_email.id) if use_sender_id else None, - } - } + if result: + seed_bounce_rate_in_redis.apply_async.assert_called_once_with(service_id) diff --git a/tests/app/v2/test_errors.py b/tests/app/v2/test_errors.py index 49cd0d4ac0..6841e46efe 100644 --- a/tests/app/v2/test_errors.py +++ b/tests/app/v2/test_errors.py @@ -1,3 +1,5 @@ +import json + import pytest from flask import url_for from sqlalchemy.exc import DataError @@ -168,7 +170,4 @@ def test_bad_method(app_for_test): assert response.status_code == 405 - assert response.json == { - "result": "error", - "message": "The method is not allowed for the requested URL.", - } + assert json.loads(response.data) == {"message": "The method is not allowed for the requested URL.", "result": "error"} diff --git a/tests/conftest.py b/tests/conftest.py index 3ec83bac1c..cfa57a7637 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,6 @@ import os from contextlib import contextmanager +from typing import List from urllib.parse import urlparse import pytest @@ -9,6 +10,14 @@ from flask import Flask from app import create_app, db +from app.encryption import CryptoSigner + + +def pytest_configure(config): + # Swap to test database if running from devcontainer + if os.environ.get("SQLALCHEMY_DATABASE_TEST_URI") is not None: + os.environ["SQLALCHEMY_DATABASE_URI"] = os.environ.get("SQLALCHEMY_DATABASE_TEST_URI") + os.environ["SQLALCHEMY_DATABASE_READER_URI"] = os.environ.get("SQLALCHEMY_DATABASE_TEST_URI") @pytest.fixture(scope="session") @@ -94,6 +103,7 @@ def notify_db(notify_api, worker_id): "reader": uri_db_reader, "writer": uri_db_writer, } + create_test_db(uri_db_writer) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) @@ -146,7 +156,7 @@ def os_environ(): class EnvironDict(dict): def __setitem__(self, key, value): - assert type(value) == str + assert isinstance(value, str) super().__setitem__(key, value) os.environ = EnvironDict() @@ -188,6 +198,16 @@ def set_config_values(app, dict): app.config[key] = old_values[key] +@contextmanager +def set_signer_secret_key(signer: CryptoSigner, secret_key: str | List[str]): + old_secret_key = signer.secret_key + signer.init_app(signer.app, secret_key, signer.salt) + try: + yield + finally: + signer.init_app(signer.app, old_secret_key, signer.salt) + + class Matcher: def __init__(self, description, key): self.description = description diff --git a/tests_cypress/README b/tests_cypress/README new file mode 100644 index 0000000000..bf82c8b10b --- /dev/null +++ b/tests_cypress/README @@ -0,0 +1,43 @@ +# Notify + Cypress 🎉 + +## Setup +This folder contains Cypress tests suites. In order to run them, you'll need to install cypress and its dependencies. If you're running inside the dev container, rebuild your dev container to get the necessary packages. + +## Running the tests +### In your devcontainer +There are some issues getting the cypress UI to launch within the devcontainer. 
For now, you can run the headless tests inside the dev container, but if you want to launch the cypress UI you will need to do that outside of the dev container. + +There are two helper scripts in `package.json` that run two of the test suites. Run these from the `tests_cypress/` folder: +- `npm run smoke`: runs the smoke tests in headless mode using the Electron browser +- `npm run a11y`: runs the accessibility tests in headless mode using the Electron browser + +### Outside of your devcontainer +To launch the cypress UI, where you can choose your test suite and visually debug and inspect tests, run (from the `tests_cypress/` folder): +- `npm run cypress`: opens the cypress UI, where you can choose which tests to run and in which browser + +### Local installation +To install cypress locally, use the following commands from the `tests_cypress/` folder: +```bash +npm install +npx cypress install +``` + +## Configuration +- `cypress.env.json`: this file contains sensitive items like API keys and passphrases that you'll need to run the tests. Add the `cypress.env.json` file to the `tests_cypress/` folder; its contents can be found in LastPass. +- `config.js`: this file contains non-sensitive items like template IDs and hostnames that you'll need to run the tests. + +### `cypress.env.json` contents +| key | description | +| --------------- | ----------------------------------------------- | +| ADMIN_SECRET | Secret the admin uses to authenticate against the API | +| ADMIN_USERNAME | Username the admin uses to authenticate against the API | +| NOTIFY_USER | Notify user used by the tests | +| NOTIFY_PASSWORD | Password of NOTIFY_USER | +| IMAP_PASSWORD | IMAP password of the Gmail account for NOTIFY_USER | + +### Target environment 🎯 +The tests are configured to run against the staging environment by default. To run the tests against your local environment, you'll need to create a local service and API keys and store these values in your config. 
You will also need to update the `ConfigToUse` variable in `config.js` file: +```js +const ConfigToUse = config.LOCAL; +``` + diff --git a/tests_cypress/config.js b/tests_cypress/config.js new file mode 100644 index 0000000000..5ea2ae52d8 --- /dev/null +++ b/tests_cypress/config.js @@ -0,0 +1,108 @@ +let PRODUCTION = { + CONFIG_NAME: "PRODUCTION", + Hostnames: { + API: 'https://api.notification.canada.ca', + Admin: 'https://notification.canada.ca', + DDAPI: 'https://api.document.notification.canada.ca', + }, + Services: { + Notify: 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553', + Cypress: '5c8a0501-2aa8-433a-ba51-cefb8063ab93' + }, + Templates: { + 'FILE_ATTACH_TEMPLATE_ID': 'ee6e4f6e-df3c-49b5-82de-eca5122ce965', + 'SIMPLE_EMAIL_TEMPLATE_ID': 'bcc5ff84-4f20-4714-ac8e-7c5bd91c49a6', + 'VARIABLES_EMAIL_TEMPLATE_ID': '0f10fca7-a5e8-4c03-bc96-610072f236eb', + 'SMOKE_TEST_EMAIL': 'be04f866-2302-4b76-8efd-2dec1e853c7d', + 'SMOKE_TEST_EMAIL_BULK': '6978ecf0-8049-47ca-b5af-f010796e8805', + 'SMOKE_TEST_EMAIL_ATTACH': 'ee6e4f6e-df3c-49b5-82de-eca5122ce965', + 'SMOKE_TEST_EMAIL_LINK': '8bac2ff9-32e6-4e19-bf80-4218ce4789fd', + 'SMOKE_TEST_SMS': 'f718f471-d940-44f6-9841-93f61da9b4f7' + }, + Users: { + Team: ['andrew.leith+2@cds-snc.ca'], + NonTeam: ['person@example.com'], + Simulated: ['simulate-delivered-2@notification.canada.ca', 'simulate-delivered-3@notification.canada.ca', 'success@simulator.amazonses.com'], + SimulatedPhone: ['+16132532222', '+16132532223', '+16132532224'] + }, + +} + +let STAGING = { + CONFIG_NAME: "STAGING", + Hostnames: { + API: 'https://api.staging.notification.cdssandbox.xyz', + Admin: 'https://staging.notification.cdssandbox.xyz', + DDAPI: 'https://api.document.staging.notification.cdssandbox.xyz', + }, + Services: { + Notify: 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553', + Cypress: '5c8a0501-2aa8-433a-ba51-cefb8063ab93' + }, + Templates: { + 'FILE_ATTACH_TEMPLATE_ID': '7246c71e-3d60-458b-96af-af17a5b07659', + 'SIMPLE_EMAIL_TEMPLATE_ID': '939dafde-1b60-47f0-a6d5-c9080d92a4a8', + 'VARIABLES_EMAIL_TEMPLATE_ID': '1101a00a-11b7-4036-865c-add43fcff7c9', + 'SMOKE_TEST_EMAIL': '5e26fae6-3565-44d5-bfed-b18680b6bd39', + 'SMOKE_TEST_EMAIL_BULK': '04145882-0f21-4d57-940d-69883fc23e77', + 'SMOKE_TEST_EMAIL_ATTACH': 'bf85def8-01b4-4c72-98a8-86f2bc10f2a4', + 'SMOKE_TEST_EMAIL_LINK': '37924e87-038d-48b8-b122-f6dddefd56d5', + 'SMOKE_TEST_SMS': '16cae0b3-1d44-47ad-a537-fd12cc0646b6' + }, + Users: { + Team: ['andrew.leith+bannertest@cds-snc.ca'], + NonTeam: ['person@example.com'], + Simulated: ['simulate-delivered-2@notification.canada.ca', 'simulate-delivered-3@notification.canada.ca', 'success@simulator.amazonses.com'], + SimulatedPhone: ['+16132532222', '+16132532223', '+16132532224'] + }, + ReplyTos: { + Default: '24e5288d-8bfa-4ad4-93aa-592c11a694cd', + Second: '797865c4-788b-4184-91ae-8e45eb07e40b' + }, + viewports: [320,375,640,768] +}; + +let LOCAL = { + CONFIG_NAME: "LOCAL", + Hostnames: { + API: 'http://localhost:6011', + Admin: 'http://localhost:6012', + DDAPI: 'http://localhost:7000', + }, + Services: { + Notify: 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553', + Cypress: '4049c2d0-0cab-455c-8f4c-f356dff51810' + }, + Templates: { + 'FILE_ATTACH_TEMPLATE_ID': '7246c71e-3d60-458b-96af-af17a5b07659', + 'SIMPLE_EMAIL_TEMPLATE_ID': 'b4692883-4182-4a23-b1b9-7b9df66a66e8', + 'VARIABLES_EMAIL_TEMPLATE_ID': '258d8617-da88-4faa-ad28-46cc69f5a458', + 'SMOKE_TEST_EMAIL': '136e951e-05c8-4db4-bc50-fe122d72fcaa', + 'SMOKE_TEST_EMAIL_BULK': '48207d93-144d-4ebb-92c5-99ff1f1baead', + 'SMOKE_TEST_EMAIL_ATTACH': 
'58db03d6-a9d8-4482-8621-26f473f3980a', + 'SMOKE_TEST_EMAIL_LINK': '2d52d997-42d3-4ac0-a597-7afc94d4339a', + 'SMOKE_TEST_SMS': '5945e2f0-3e37-4813-9a60-e0665e02e9c8' + }, + Users: { + Team: ['andrew.leith+bannertest@cds-snc.ca'], + NonTeam: ['person@example.com'], + Simulated: ['simulate-delivered-2@notification.canada.ca', 'simulate-delivered-3@notification.canada.ca', 'success@simulator.amazonses.com'], + SimulatedPhone: ['+16132532222', '+16132532223', '+16132532224'] + }, + ReplyTos: { + Default: '1bc45a34-f4de-4635-b36f-7da2e2d248ed', + Second: 'aaa58593-fc0a-46b0-82b8-b303ae662a41' + }, + viewports: [320,375,640,768] +}; + +const config = { + STAGING, + LOCAL, + PRODUCTION +}; + +// choose which config to use here +const ConfigToUse = config.LOCAL; + +module.exports = ConfigToUse; diff --git a/tests_cypress/cypress.config.js b/tests_cypress/cypress.config.js new file mode 100644 index 0000000000..473a922c12 --- /dev/null +++ b/tests_cypress/cypress.config.js @@ -0,0 +1,43 @@ +var config = require('./config'); + +const { defineConfig } = require("cypress"); +const EmailAccount = require('./cypress/plugins/email-account') +const htmlvalidate = require("cypress-html-validate/plugin"); + +module.exports = defineConfig({ + e2e: { + baseUrl: config.Hostnames.API, + setupNodeEvents: async (on, config) => { + htmlvalidate.install(on); + + const emailAccount = await EmailAccount() + on('task', { + getLastEmail() { + return emailAccount.getLastEmail() + }, + deleteAllEmails() { + return emailAccount.deleteAllEmails() + }, + fetchEmail(acct) { + return emailAccount.fetchEmail(acct) + }, + createEmailAccount() { + return emailAccount.createEmailAccount(); + } + }); + + on('before:browser:launch', (browser = {}, launchOptions) => { + if (browser.family === 'chromium' && browser.name !== 'electron') { + launchOptions.extensions = []; + } + return launchOptions; + }); + }, + specPattern: '**/e2e/**/*.cy.js', + watchForFileChanges: false, + blockHosts: ['*google-analytics.com', 'stats.g.doubleclick.net', 'bam.nr-data.net', '*newrelic.com'], + viewportWidth: 1280, + viewportHeight: 850, + testIsolation: false + }, +}); diff --git a/tests_cypress/cypress/Notify/NotifyAPI.js b/tests_cypress/cypress/Notify/NotifyAPI.js new file mode 100644 index 0000000000..400dd1b96b --- /dev/null +++ b/tests_cypress/cypress/Notify/NotifyAPI.js @@ -0,0 +1,170 @@ +import jwt from "jsonwebtoken"; +import config from "../../config"; + +const Utilities = { + CreateJWT: (user, secret) => { + const claims = { + 'iss': user, + 'iat': Math.round(Date.now() / 1000) + } + + var token = jwt.sign(claims, secret); + + return token; + }, + GenerateID: (length=10) => { + let result = ''; + const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + const charactersLength = characters.length; + let counter = 0; + while (counter < length) { + result += characters.charAt(Math.floor(Math.random() * charactersLength)); + counter += 1; + } + return result; + } +}; +const Admin = { + SendOneOff: ({to, template_id}) => { + var token = Utilities.CreateJWT(Cypress.env('ADMIN_USERNAME'), Cypress.env(config.CONFIG_NAME).ADMIN_SECRET); + return cy.request({ + url: `/service/${config.Services.Cypress}/send-notification`, + method: 'POST', + headers: { + Authorization: `Bearer ${token}`, + }, + body: { + 'to': to, + 'template_id': template_id, + 'created_by': Cypress.env(config.CONFIG_NAME).NOTIFY_USER_ID, + } + }); + } +} + +const API = { + SendEmail: ({ api_key, to, template_id, personalisation, failOnStatusCode = true, 
email_reply_to_id }) => { + return cy.request({ + failOnStatusCode: failOnStatusCode, + url: '/v2/notifications/email', + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: 'ApiKey-v1 ' + api_key, + }, + body: { + "email_address": to, + "template_id": template_id, + "personalisation": personalisation, + ...(email_reply_to_id) && { email_reply_to_id: email_reply_to_id } // only add email_reply_to_id if it's defined + } + }); + }, + SendBulkEmail: ({ api_key, to, bulk_name, template_id, personalisation, failOnStatusCode = true, scheduled_for}) => { + return cy.request({ + failOnStatusCode: failOnStatusCode, + url: '/v2/notifications/bulk', + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: 'ApiKey-v1 ' + api_key, + }, + body: { + "name": bulk_name, + "template_id": template_id, + "rows": [ + ["email address"], + ...to + ], + ...(scheduled_for) && { scheduled_for: scheduled_for } // only add scheduled_for if it's defined + } + }); + }, + SendSMS: ({ api_key, to, template_id, personalisation, failOnStatusCode = true }) => { + return cy.request({ + failOnStatusCode: failOnStatusCode, + url: '/v2/notifications/sms', + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: 'ApiKey-v1 ' + api_key, + }, + body: { + "phone_number": to, + "template_id": template_id, + "personalisation": personalisation, + } + }); + }, + SendBulkSMS: ({ api_key, to, bulk_name, template_id, personalisation, failOnStatusCode = true }) => { + return cy.request({ + failOnStatusCode: failOnStatusCode, + url: '/v2/notifications/bulk', + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: 'ApiKey-v1 ' + api_key, + }, + body: { + "name": bulk_name, + "template_id": template_id, + "rows": [ + ["phone number"], + ...to + ], + } + }); + }, + CreateAPIKey: ({ service_id, key_type, name }) => { + var token = Utilities.CreateJWT(Cypress.env('ADMIN_USERNAME'), Cypress.env(config.CONFIG_NAME).ADMIN_SECRET); + return cy.request({ + url: `/service/${service_id}/api-key`, + method: 'POST', + headers: { + Authorization: `Bearer ${token}`, + }, + body: { + key_type: key_type, + name: name, + created_by: Cypress.env(config.CONFIG_NAME).NOTIFY_USER_ID, + } + }); + }, + RevokeAPIKey: ({ token, type, url, source, failOnStatusCode = true }) => { + + var jwt_token = Utilities.CreateJWT(Cypress.env('SRE_USERNAME'), Cypress.env(config.CONFIG_NAME).SRE_SECRET); + cy.request({ + url: `/sre-tools/api-key-revoke`, + method: 'POST', + headers: { + Authorization: `Bearer ${jwt_token}`, + }, + body: { + "token": token, + "type": type, + "url": url, + "source": source + } + }); + }, + RevokeAPIKeyWithAdminAuth: ({ token, type, url, source, failOnStatusCode = true }) => { + var jwt_token = Utilities.CreateJWT(Cypress.env('ADMIN_USERNAME'),Cypress.env(config.CONFIG_NAME).ADMIN_SECRET); + return cy.request({ + url: `/sre-tools/api-key-revoke`, + method: 'POST', + headers: { + Authorization: `Bearer ${jwt_token}`, + }, + body: { + "token": token, + "type": type, + "url": url, + "source": source + }, + failOnStatusCode: failOnStatusCode + }); + } + +} + +export default { API, Utilities, Admin }; diff --git a/tests_cypress/cypress/e2e/api/all.cy.js b/tests_cypress/cypress/e2e/api/all.cy.js new file mode 100644 index 0000000000..48c595e245 --- /dev/null +++ b/tests_cypress/cypress/e2e/api/all.cy.js @@ -0,0 +1,3 @@ +import './email_notifications.cy'; +import './file_attach.cy'; +import './sre_tools.cy'; \ No newline at end of file diff 
--git a/tests_cypress/cypress/e2e/api/email_notifications.cy.js b/tests_cypress/cypress/e2e/api/email_notifications.cy.js new file mode 100644 index 0000000000..a1a4a3f79c --- /dev/null +++ b/tests_cypress/cypress/e2e/api/email_notifications.cy.js @@ -0,0 +1,156 @@ +/// + +import config from '../../../config'; +import Notify from "../../Notify/NotifyAPI"; + +describe(`Email notifications test[${config.CONFIG_NAME}]`, () => { + var keys = { + LIVE: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + TEAM: Cypress.env(config.CONFIG_NAME).API_KEY_TEAM, + TEST: Cypress.env(config.CONFIG_NAME).API_KEY_TEST, + }; + + + for (const api_key in keys) { + context(`With ${api_key} api key`, () => { + it('can send email notification without personalisation', () => { + Notify.API.SendEmail({ + api_key: keys[api_key], + to: api_key === 'TEAM' ? config.Users.Team[0] : config.Users.Simulated[1], + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: {}, + }).as('emailRequest'); + + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + }); + + it('can send email notification with personalisation', () => { + Notify.API.SendEmail({ + api_key: keys[api_key], + to: api_key === 'TEAM' ? config.Users.Team[0] : config.Users.Simulated[1], + template_id: config.Templates.VARIABLES_EMAIL_TEMPLATE_ID, + personalisation: { + name: 'Alex', + has_stuff: true + }, + }).as('emailRequest'); + + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + + }); + + it('can send email to smoke test addresses', () => { + if (api_key !== 'TEAM') { + for (const email of config.Users.Simulated) { + Notify.API.SendEmail({ + api_key: keys[api_key], + to: email, + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: {}, + }).as('emailRequest'); + + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + } + } + }); + + it('can use a non-default replyTo', () => { + Notify.API.SendEmail({ + api_key: keys[api_key], + to: config.Users.Simulated[0], + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: {}, + email_reply_to_id: config.ReplyTos.Second + }).as('emailRequest'); + + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + }); + + it('can use a default replyTo', () => { + Notify.API.SendEmail({ + api_key: keys[api_key], + to: config.Users.Simulated[0], + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: {}, + email_reply_to_id: config.ReplyTos.Default + }).as('emailRequest'); + + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + }); + + it('can use no replyTo', () => { + Notify.API.SendEmail({ + api_key: keys[api_key], + to: config.Users.Simulated[0], + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: {} + }).as('emailRequest'); + + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + }); + + it('Can scheduled a bulk email send', () => { + // Schedule 20 seconds from now + var secheduled_for = new Date(); + secheduled_for.setSeconds(secheduled_for.getSeconds()+20); + + // send an email using the Notify API + Notify.API.SendBulkEmail({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: [[config.Users.Simulated[0]],[config.Users.Simulated[0]],[config.Users.Simulated[0]],[config.Users.Simulated[0]],[config.Users.Simulated[0]]], + bulk_name: "Smoke Test", + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: {}, + scheduled_for: 
secheduled_for.toISOString(), + }).as('emailRequest'); + + // ensure API returns a 201 + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + }); + // Additional tests for TEAM keys + if (api_key === 'TEAM') { + it('can send to team address', () => { + Notify.API.SendEmail({ + api_key: keys[api_key], + to: config.Users.Team[0], + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: {}, + }).as('emailRequest'); + + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + }); + + it('cannot send to non-team address', () => { + Notify.API.SendEmail({ + api_key: keys[api_key], + to: config.Users.NonTeam[0], + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: {}, + failOnStatusCode: false + }).as('emailRequest'); + + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(400); + }); + }); + } + }); + } +}); + diff --git a/tests_cypress/cypress/e2e/api/file_attach.cy.js b/tests_cypress/cypress/e2e/api/file_attach.cy.js new file mode 100644 index 0000000000..e6cc0e9619 --- /dev/null +++ b/tests_cypress/cypress/e2e/api/file_attach.cy.js @@ -0,0 +1,56 @@ +/// + +import config from '../../../config'; +import Notify from "../../Notify/NotifyAPI"; + +describe('File attachment test', () => { + it('can send single attachment', () => { + cy.fixture('payloads/file_attachment_1').then(file_payload => { + Notify.API.SendEmail({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: config.Users.Simulated[0], + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: file_payload, + failOnStatusCode: false + }).as('fileRequest'); + + cy.get('@fileRequest').then(todos => { + expect(todos.status).to.eq(201); + }); + }); + }); + + it('can send 10 attachments', () => { + cy.fixture('payloads/file_attachment_10').then(file_payload => { + Notify.API.SendEmail({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: config.Users.Simulated[0], + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: file_payload, + failOnStatusCode: false + }).as('fileRequest'); + + cy.get('@fileRequest').then(todos => { + expect(todos.status).to.eq(201); + }); + }); + }); + + it('cannot send 16 attachments', () => { + cy.fixture('payloads/file_attachment_16').then(file_payload => { + + Notify.API.SendEmail({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: config.Users.Simulated[0], + template_id: config.Templates.SIMPLE_EMAIL_TEMPLATE_ID, + personalisation: file_payload, + failOnStatusCode: false + }).as('fileRequest'); + + cy.get('@fileRequest').then(todos => { + expect(todos.status).to.eq(400); + }); + }); + }); + +}); diff --git a/tests_cypress/cypress/e2e/api/sre_tools.cy.js b/tests_cypress/cypress/e2e/api/sre_tools.cy.js new file mode 100644 index 0000000000..01e24d0c52 --- /dev/null +++ b/tests_cypress/cypress/e2e/api/sre_tools.cy.js @@ -0,0 +1,43 @@ +/// + +import config from '../../../config'; +import Notify from "../../Notify/NotifyAPI"; + +describe('SRE Tools', () => { + it('can revoke an API key using SRE auth', () => { + let key_name = 'api-revoke-test-' + Notify.Utilities.GenerateID(); + + Notify.API.CreateAPIKey({ + service_id: config.Services.Cypress, + key_type: 'normal', + name: key_name + }).as('APIKey'); + + cy.log("Generated API KEY: " + key_name); + + cy.get('@APIKey').then((response) => { + let api_key = response.body.data.key_name + "-" + config.Services.Cypress + "-" + response.body.data.key; + + Notify.API.RevokeAPIKey({ + token: 
api_key, + type: 'normal', + url:'https://example.com', + source: 'Cypress Test' + }); + cy.log("Revoked API KEY: " + key_name); + }); + }); + it('cannot revoke an API key using admin auth', () => { + Notify.API.RevokeAPIKeyWithAdminAuth({ + token: "fake-key", + type: 'normal', + url:'https://example.com', + source: 'Cypress Test', + failOnStatusCode: false + }).as('revokeRequest'); + + cy.get('@revokeRequest').then(response => { + expect(response.status).to.eq(401); + }); + }); +}); diff --git a/tests_cypress/cypress/e2e/smoke/smoke.cy.js b/tests_cypress/cypress/e2e/smoke/smoke.cy.js new file mode 100644 index 0000000000..f4500fd43b --- /dev/null +++ b/tests_cypress/cypress/e2e/smoke/smoke.cy.js @@ -0,0 +1,311 @@ +/// + +const { recurse } = require('cypress-recurse') +const nodemailer = require("nodemailer"); +import config from '../../../config'; +import Notify from "../../Notify/NotifyAPI"; + +const ADMIN_COOKIE = 'notify_admin_session'; + +describe(`Smoke tests [${config.CONFIG_NAME}]`, () => { + context('API tests', () => { + context('Email', () => { + it('can send/receive a one-off email', () => { + // create an ethereal email account to use for this test + cy.task('createEmailAccount').then(acct => { + cy.log("Email account created for test: " + acct.user); + + // send an email using the Notify API + Notify.API.SendEmail({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: acct.user, + template_id: config.Templates.SMOKE_TEST_EMAIL, + personalisation: {}, + }).as('emailRequest'); + + // ensure API returns a 201 + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + + // verify email receipt + recurse( + () => cy.task('fetchEmail', acct), // Cypress commands to retry + Cypress._.isObject, // keep retrying until the tas`k returns an object + { + log: true, + limit: 50, // max number of iterations + timeout: 30000, // time limit in ms + delay: 500, // delay before next iteration, ms + }, + ).then(response => { + response.html = ` +
+                <ul>
+                  <li>FROM: ${response.from}</li>
+                  <li>TO: ${response.to}</li>
+                  <li>SUBJECT: ${response.subject}</li>
+                </ul>
+ ` + response.html; + cy.document().then((document) => { document.documentElement.innerHTML = response.html }) + }); + + // ensure SMOKE test email is received + cy.contains('p', "SMOKE_TEST").should('be.visible'); + }); + }); + + it('can send/receive bulk CSV emails', () => { + // create an ethereal email account to use for this test + cy.task('createEmailAccount').then(acct => { + cy.log("Email account created for test: " + acct.user); + + // send an email using the Notify API + Notify.API.SendBulkEmail({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: [[acct.user],[acct.user],[acct.user],[acct.user],[acct.user]], + bulk_name: "Smoke Test", + template_id: config.Templates.SMOKE_TEST_EMAIL_BULK, + personalisation: {}, + }).as('emailRequest'); + + // ensure API returns a 201 + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + + // verify email receipt + recurse( + () => cy.task('fetchEmail', acct), // Cypress commands to retry + (response) => response.totalEmails === 5, // keep trying until the inbox has 5 emails + { + log: true, + limit: 50, // max number of iterations + timeout: 30000, // time limit in ms + delay: 500, // delay before next iteration, ms + }, + ).then(response => { + cy.document().then((document) => { document.documentElement.innerHTML = response.html }) + }); + // ensure SMOKE test email is received + cy.contains('p', "SMOKE_TEST_EMAIL_BULK").should('be.visible'); + }); + + }); + + it('can send/receive a one-off email w/ attachment', () => { + cy.task('createEmailAccount').then(acct => { + cy.log("Email account created for test: " + acct.user); + + // send an email using the Notify API + cy.fixture('payloads/file_attachment_1').then(file_payload => { + Notify.API.SendEmail({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: acct.user, + template_id: config.Templates.SMOKE_TEST_EMAIL_ATTACH, + personalisation: file_payload, + failOnStatusCode: false + }).as('fileRequest'); + + cy.get('@fileRequest').then(todos => { + expect(todos.status).to.eq(201); + }); + + // verify email receipt + recurse( + () => cy.task('fetchEmail', acct), // Cypress commands to retry + Cypress._.isObject, // keep retrying until the tas`k returns an object + { + log: true, + limit: 50, // max number of iterations + timeout: 30000, // time limit in ms + delay: 500, // delay before next iteration, ms + }, + ).then(response => { + expect(response.attachments[0].filename).to.equal(file_payload.application_file1.filename); + cy.document().then((document) => { document.documentElement.innerHTML = response.html }) + }); + }); + // ensure SMOKE test email is received + cy.contains('p', "SMOKE_TEST_EMAIL_ATTACH").should('be.visible'); + }); + }); + + it('can send/receive one-off emails w/ link attachment', () => { + cy.task('createEmailAccount').then(acct => { + cy.log("Email account created for test: " + acct.user); + + // send an email using the Notify API + cy.fixture('payloads/file_link').then(file_payload => { + Notify.API.SendEmail({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: acct.user, + template_id: config.Templates.SMOKE_TEST_EMAIL_LINK, + personalisation: file_payload, + failOnStatusCode: false + }).as('fileRequest'); + + cy.get('@fileRequest').then(todos => { + expect(todos.status).to.eq(201); + }); + + // verify email receipt + recurse( + () => cy.task('fetchEmail', acct), // Cypress commands to retry + Cypress._.isObject, // keep retrying until the tas`k returns an object + { + log: true, + limit: 50, // max number 
of iterations + timeout: 30000, // time limit in ms + delay: 500, // delay before next iteration, ms + }, + ).then(response => { + cy.document().then((document) => { document.documentElement.innerHTML = response.html }) + }); + }); + // ensure SMOKE test email is received + cy.contains('p', "SMOKE_TEST_EMAIL_LINK").should('be.visible'); + // ensure link to ddapi is in the email + cy.contains('p', config.Hostnames.DDAPI).should('be.visible'); + }); + }); + + it('can schedule and receive bulk CSV emails', () => { + // create an ethereal email account to use for this test + cy.task('createEmailAccount').then(acct => { + cy.log("Email account created for test: " + acct.user); + + // Schedule 20 seconds from now + var secheduled_for = new Date(); + secheduled_for.setSeconds(secheduled_for.getSeconds()+20); + + // send an email using the Notify API + Notify.API.SendBulkEmail({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: [[acct.user],[acct.user],[acct.user],[acct.user],[acct.user]], + bulk_name: "Smoke Test", + template_id: config.Templates.SMOKE_TEST_EMAIL_BULK, + personalisation: {}, + scheduled_for: secheduled_for.toISOString(), + }).as('emailRequest'); + + // ensure API returns a 201 + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + + // verify email receipt + recurse( + () => cy.task('fetchEmail', acct), // Cypress commands to retry + (response) => response.totalEmails === 5, // keep trying until the inbox has 5 emails + { + log: true, + limit: 50, // max number of iterations + timeout: 65000, // time limit in ms + delay: 500, // delay before next iteration, ms + }, + ).then(response => { + cy.document().then((document) => { document.documentElement.innerHTML = response.html }) + }); + // ensure SMOKE test email is received + cy.contains('p', "SMOKE_TEST_EMAIL_BULK").should('be.visible'); + }); + + }); + }); + + context('SMS', () => { + it('can send a one-off SMS', () => { + // send an email using the Notify API + Notify.API.SendSMS({ + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: config.Users.SimulatedPhone[0], + template_id: config.Templates.SMOKE_TEST_SMS, + personalisation: {}, + }).as('emailRequest'); + + // ensure API returns a 201 + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + }); + it('can send bulk CSV SMSs', () => { + // send an email using the Notify API + Notify.API.SendBulkSMS({ + bulk_name: "Smoke Test", + api_key: Cypress.env(config.CONFIG_NAME).API_KEY_LIVE, + to: [[config.Users.SimulatedPhone[0]],[config.Users.SimulatedPhone[1]]], + template_id: config.Templates.SMOKE_TEST_SMS, + personalisation: {}, + }).as('emailRequest'); + + // ensure API returns a 201 + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + }); + }); + }); + + context('ADMIN tests', () => { + context('Email', () => { + it('can send/receive a one-off email', () => { + // create an ethereal email account to use for this test + cy.task('createEmailAccount').then(acct => { + cy.log("Email account created for test: " + acct.user); + + Notify.Admin.SendOneOff({ + to: acct.user, + template_id: config.Templates.SMOKE_TEST_EMAIL + }).as('emailRequest'); + + // ensure API returns a 201 + cy.get('@emailRequest').then(resp => { + expect(resp.status).to.eq(201); + }); + + // verify email receipt + recurse( + () => cy.task('fetchEmail', acct), // Cypress commands to retry + Cypress._.isObject, // keep retrying until the tas`k returns an object + { + log: true, + limit: 50, // max number of 
iterations + timeout: 30000, // time limit in ms + delay: 500, // delay before next iteration, ms + }, + ).then(response => { + response.html = ` +
+                <ul>
+                  <li>FROM: ${response.from}</li>
+                  <li>TO: ${response.to}</li>
+                  <li>SUBJECT: ${response.subject}</li>
+                </ul>
+ ` + response.html; + cy.document().then((document) => { document.documentElement.innerHTML = response.html }) + }); + + // ensure SMOKE test email is received + cy.contains('p', "SMOKE_TEST").should('be.visible'); + }); + + }); + // it('can send/receive bulk CSV emails', () => { + // }); + }); + + context('SMS', () => { + // it('can send a one-off SMS', () => { + // }); + // it('can send bulk CSV SMSs', () => { + // }); + }); + }); + +}); + diff --git a/tests_cypress/cypress/fixtures/payloads/file_attachment_1.json b/tests_cypress/cypress/fixtures/payloads/file_attachment_1.json new file mode 100644 index 0000000000..d94eafa068 --- /dev/null +++ b/tests_cypress/cypress/fixtures/payloads/file_attachment_1.json @@ -0,0 +1,7 @@ +{ + "application_file1": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file1.txt", + "sending_method": "attach" + } +} \ No newline at end of file diff --git a/tests_cypress/cypress/fixtures/payloads/file_attachment_10.json b/tests_cypress/cypress/fixtures/payloads/file_attachment_10.json new file mode 100644 index 0000000000..093d479c5d --- /dev/null +++ b/tests_cypress/cypress/fixtures/payloads/file_attachment_10.json @@ -0,0 +1,52 @@ +{ + "application_file1": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file1.txt", + "sending_method": "attach" + }, + "application_file2": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file2.txt", + "sending_method": "attach" + }, + "application_file3": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file3.txt", + "sending_method": "attach" + }, + "application_file4": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file4.txt", + "sending_method": "attach" + }, + "application_file5": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file5.txt", + "sending_method": "attach" + }, + "application_file6": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file6.txt", + "sending_method": "attach" + }, + "application_file7": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file7.txt", + "sending_method": "attach" + }, + "application_file8": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file8.txt", + "sending_method": "attach" + }, + "application_file9": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file9.txt", + "sending_method": "attach" + }, + "application_file10": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file10.txt", + "sending_method": "attach" + } +} \ No newline at end of file diff --git a/tests_cypress/cypress/fixtures/payloads/file_attachment_16.json b/tests_cypress/cypress/fixtures/payloads/file_attachment_16.json new file mode 100644 index 0000000000..6c60532c43 --- /dev/null +++ b/tests_cypress/cypress/fixtures/payloads/file_attachment_16.json @@ -0,0 +1,82 @@ +{ + "application_file1": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file1.txt", + "sending_method": "attach" + }, + "application_file2": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file2.txt", + "sending_method": "attach" + }, + "application_file3": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file3.txt", + "sending_method": "attach" + }, + "application_file4": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file4.txt", + "sending_method": "attach" + }, + "application_file5": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file5.txt", + "sending_method": "attach" + }, + "application_file6": { + "file": 
"VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file6.txt", + "sending_method": "attach" + }, + "application_file7": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file7.txt", + "sending_method": "attach" + }, + "application_file8": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file8.txt", + "sending_method": "attach" + }, + "application_file9": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file9.txt", + "sending_method": "attach" + }, + "application_file10": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file10.txt", + "sending_method": "attach" + }, + "application_file11": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file11.txt", + "sending_method": "attach" + }, + "application_file12": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file12.txt", + "sending_method": "attach" + }, + "application_file13": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file13.txt", + "sending_method": "attach" + }, + "application_file14": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file14.txt", + "sending_method": "attach" + }, + "application_file15": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file15.txt", + "sending_method": "attach" + }, + "application_file16": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "application_file16.txt", + "sending_method": "attach" + } +} \ No newline at end of file diff --git a/tests_cypress/cypress/fixtures/payloads/file_link.json b/tests_cypress/cypress/fixtures/payloads/file_link.json new file mode 100644 index 0000000000..29b353ac5d --- /dev/null +++ b/tests_cypress/cypress/fixtures/payloads/file_link.json @@ -0,0 +1,7 @@ +{ + "link_to_file": { + "file": "VGhpcyBpcyBhIGZpbGUh", + "filename": "your_custom_filename.pdf", + "sending_method": "link" + } +} \ No newline at end of file diff --git a/tests_cypress/cypress/plugins/email-account.js b/tests_cypress/cypress/plugins/email-account.js new file mode 100644 index 0000000000..93b3ad9f67 --- /dev/null +++ b/tests_cypress/cypress/plugins/email-account.js @@ -0,0 +1,199 @@ +// used to check the email inbox +const imaps = require('imap-simple') +const nodemailer = require("nodemailer"); +// used to parse emails from the inbox +const simpleParser = require('mailparser').simpleParser +const env = require('../../cypress.env.json'); +const _ = require('lodash'); + +const emailAccount = async () => { + + const emailConfig = { + imap: { + user: env.NOTIFY_USER, + password: env.IMAP_PASSWORD, + host: 'imap.gmail.com', + port: 993, + tls: true, + authTimeout: 10000, + tlsOptions: { + rejectUnauthorized: false + } + }, + } + + const userEmail = { + /** + * Utility method for getting the last email + * for the Ethereal email account + */ + async deleteAllEmails() { + // console.debug('Purging the inbox...') + + try { + const connection = await imaps.connect(emailConfig) + + // grab up to 50 emails from the inbox + await connection.openBox('INBOX') + const searchCriteria = ['1:50'] + const fetchOptions = { + bodies: [''], + } + const messages = await connection.search(searchCriteria, fetchOptions) + + if (!messages.length) { + // console.log('Cannot find any emails') + // and close the connection to avoid it hanging + connection.end() + return null + } else { + // console.log('There are %d messages, deleting them...', messages.length) + // delete all messages + const uidsToDelete = messages + .filter(message => { + return message.parts + }) + .map(message => 
message.attributes.uid); + + if (uidsToDelete.length > 0) { + await connection.deleteMessage(uidsToDelete); + } + // and close the connection to avoid it hanging + connection.end() + + // and returns the main fields + return {} + } + } catch (e) { + // and close the connection to avoid it hanging + // connection.end() + console.error(e) + return null + } + }, + /** + * Utility method for getting the last email + * for the Ethereal email account + */ + async getLastEmail() { + // makes debugging very simple + // console.log('Getting the last email') + + try { + const connection = await imaps.connect(emailConfig) + + // grab up to 50 emails from the inbox + await connection.openBox('INBOX') + const searchCriteria = ['1:50', 'UNDELETED'] + const fetchOptions = { + bodies: [''], + } + const messages = await connection.search(searchCriteria, fetchOptions) + // and close the connection to avoid it hanging + connection.end() + + if (!messages.length) { + // console.log('Cannot find any emails') + return null + } else { + // console.log('There are %d messages', messages.length) + // grab the last email + const mail = await simpleParser( + messages[messages.length - 1].parts[0].body, + ) + // console.log(mail.subject) + // console.log(mail.text) + + + // and returns the main fields + return { + subject: mail.subject, + text: mail.text, + html: mail.html, + } + } + } catch (e) { + // and close the connection to avoid it hanging + // connection.end() + + console.error(e) + return null + } + }, + async fetchEmail(acct) { + const _config = { + imap: { + user: acct.user, + password: acct.pass, + host: "imap.ethereal.email", //'imap.gmail.com', + port: 993, + tls: true, + authTimeout: 10000, + tlsOptions: { + rejectUnauthorized: false + } + }, + } + try { + const connection = await imaps.connect(_config) + + // grab up to 50 emails from the inbox + await connection.openBox('INBOX') + const searchCriteria = ['1:50', 'UNDELETED'] + const fetchOptions = { + bodies: [''], + } + const messages = await connection.search(searchCriteria, fetchOptions) + // and close the connection to avoid it hanging + connection.end() + + if (!messages.length) { + // console.log('Cannot find any emails, retrying...') + return null + } else { + // console.log('There are %d messages', messages.length) + // messages.forEach(function (item) { + // var all = _.find(item.parts, { "which": "" }) + // var id = item.attributes.uid; + // var idHeader = "Imap-Id: "+id+"\r\n"; + // simpleParser(idHeader+all.body, (err, mail) => { + // // access to the whole mail object + // console.log(mail.subject) + // console.log(mail.html) + // }); + // }); + + // grab the last email + const mail = await simpleParser( + messages[messages.length - 1].parts[0].body, + ) + // console.log('m', mail) + // and returns the main fields + return { + subject: mail.subject, + to: mail.to.text, + from: mail.from.text.replace(/<|>/g, ''), + html: mail.html, + totalEmails: messages.length, + attachments: mail.attachments + } + } + } catch (e) { + // and close the connection to avoid it hanging + // connection.end() + + console.error(e) + return null + } + }, + async createEmailAccount() { + let testAccount = await nodemailer.createTestAccount(); + // console.log("test account created: ", testAccount); + return testAccount; + } + } + + return userEmail +} + +module.exports = emailAccount diff --git a/tests_cypress/cypress/support/commands.js b/tests_cypress/cypress/support/commands.js new file mode 100644 index 0000000000..72b085b4c2 --- /dev/null +++ 
b/tests_cypress/cypress/support/commands.js @@ -0,0 +1,26 @@ +// *********************************************** +// This example commands.js shows you how to +// create various custom commands and overwrite +// existing commands. +// +// For more comprehensive examples of custom +// commands please read more here: +// https://on.cypress.io/custom-commands +// *********************************************** +// +// +// -- This is a parent command -- +// Cypress.Commands.add('login', (email, password) => { ... }) +// +// +// -- This is a child command -- +// Cypress.Commands.add('drag', { prevSubject: 'element'}, (subject, options) => { ... }) +// +// +// -- This is a dual command -- +// Cypress.Commands.add('dismiss', { prevSubject: 'optional'}, (subject, options) => { ... }) +// +// +// -- This will overwrite an existing command -- +// Cypress.Commands.overwrite('visit', (originalFn, url, options) => { ... }) + diff --git a/tests_cypress/cypress/support/e2e.js b/tests_cypress/cypress/support/e2e.js new file mode 100644 index 0000000000..407f64713f --- /dev/null +++ b/tests_cypress/cypress/support/e2e.js @@ -0,0 +1,21 @@ +// *********************************************************** +// This example support/e2e.js is processed and +// loaded automatically before your test files. +// +// This is a great place to put global configuration and +// behavior that modifies Cypress. +// +// You can change the location of this file or turn off +// automatically serving support files with the +// 'supportFile' configuration option. +// +// You can read more here: +// https://on.cypress.io/configuration +// *********************************************************** + +// Import commands.js using ES2015 syntax: +import './commands' +import 'cypress-axe' +import 'cypress-html-validate/commands' +// Alternatively you can use CommonJS syntax: +// require('./commands') \ No newline at end of file diff --git a/tests_cypress/package-lock.json b/tests_cypress/package-lock.json new file mode 100644 index 0000000000..722e36d686 --- /dev/null +++ b/tests_cypress/package-lock.json @@ -0,0 +1,2447 @@ +{ + "requires": true, + "lockfileVersion": 1, + "dependencies": { + "@babel/code-frame": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "dev": true, + "requires": { + "@babel/highlight": "^7.18.6" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", + "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "dev": true + }, + "@babel/highlight": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", + "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.18.6", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "chalk": { + "version": 
"2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "@colors/colors": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", + "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", + "dev": true, + "optional": true + }, + "@cypress/request": { + "version": "2.88.12", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.12.tgz", + "integrity": "sha512-tOn+0mDZxASFM+cuAP9szGUGPI1HwWVSvdzm7V4cCsPdFTx6qMj29CwaQmRAMIEhORIUBFBsYROYJcveK4uOjA==", + "dev": true, + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "http-signature": "~1.3.6", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "performance-now": "^2.1.0", + "qs": "~6.10.3", + "safe-buffer": "^5.1.2", + "tough-cookie": "^4.1.3", + "tunnel-agent": "^0.6.0", + "uuid": "^8.3.2" + } + }, + "@cypress/xvfb": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", + "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", + "dev": true, + "requires": { + "debug": "^3.1.0", + "lodash.once": "^4.1.1" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "@html-validate/stylish": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@html-validate/stylish/-/stylish-4.2.0.tgz", + "integrity": "sha512-Nl8HCv0hGRSLQ+n1OD4Hk3a+Urwk9HH0vQkAzzCarT4KlA7bRl+6xEiS5PZVwOmjtC7XiH/oNe3as9Fxcr2A1w==", + "dev": true, + "requires": { + "kleur": "^4.0.0" + } + }, + "@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": 
"sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "requires": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true + }, + "ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true + }, + "emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "requires": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + } + }, + "string-width-cjs": { + "version": "npm:string-width@4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + } + } + }, + "strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "requires": { + "ansi-regex": "^6.0.1" + } + }, + "strip-ansi-cjs": { + "version": "npm:strip-ansi@6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + } + } + }, + "wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "requires": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + } + }, + "wrap-ansi-cjs": { + "version": "npm:wrap-ansi@7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + } + } + } + } + }, + "@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true + }, + "@selderee/plugin-htmlparser2": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@selderee/plugin-htmlparser2/-/plugin-htmlparser2-0.11.0.tgz", + "integrity": "sha512-P33hHGdldxGabLFjPPpaTxVolMrzrcegejx+0GxjrIb9Zv48D8yAIA/QTDR2dFl7Uz7urX8aX6+5bCZslr+gWQ==", + "requires": { + "domhandler": "^5.0.3", + "selderee": "^0.11.0" + } + }, + "@sidvind/better-ajv-errors": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@sidvind/better-ajv-errors/-/better-ajv-errors-2.1.0.tgz", + "integrity": "sha512-JuIb009FhHuL9priFBho2kv7QmZOydj0LgYvj+h1t0mMCmhM/YmQNRlJR5wVtBZya6wrVFK5Hi5TIbv5BKEx7w==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.16.0", + "chalk": "^4.1.0" + } + }, + "@types/node": { + "version": "16.18.76", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.76.tgz", + "integrity": 
"sha512-/GsO2uv1Z6R42lBr59dtem56gVF/yHKQaScggwU+gLU6DXE25sDmOar4c4IfWb3h+X/7OYZznPOFk7oGF3jQSA==", + "dev": true + }, + "@types/sinonjs__fake-timers": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", + "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==", + "dev": true + }, + "@types/sizzle": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.8.tgz", + "integrity": "sha512-0vWLNK2D5MT9dg0iOo8GlKguPAU02QjmZitPEsXRuJXU/OGIOt9vT9Fc26wtYuavLxtO45v9PGleoL9Z0k1LHg==", + "dev": true + }, + "@types/yauzl": { + "version": "2.10.3", + "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz", + "integrity": "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==", + "dev": true, + "optional": true, + "requires": { + "@types/node": "*" + } + }, + "acorn": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz", + "integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==", + "dev": true + }, + "acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true + }, + "acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "dev": true + }, + "aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "requires": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + } + }, + "ajv": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + }, + "ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true + }, + "ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "requires": { + "type-fest": "^0.21.3" + } + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "arch": 
{ + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", + "dev": true + }, + "asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dev": true, + "requires": { + "safer-buffer": "~2.1.0" + } + }, + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "dev": true + }, + "astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true + }, + "async": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==", + "dev": true + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true + }, + "at-least-node": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", + "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", + "dev": true + }, + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", + "dev": true + }, + "aws4": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.12.0.tgz", + "integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg==", + "dev": true + }, + "axe-core": { + "version": "4.8.4", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.8.4.tgz", + "integrity": "sha512-CZLSKisu/bhJ2awW4kJndluz2HLZYIHh5Uy1+ZwDRkJi69811xgIXXfdU9HSLX0Th+ILrHj8qfL/5wzamsFtQg==", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true + }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "dev": true, + "requires": { + "tweetnacl": "^0.14.3" + } + }, + "blob-util": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", + "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", + "dev": true + }, + 
"bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "dev": true + }, + "buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" + }, + "cachedir": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.4.0.tgz", + "integrity": "sha512-9EtFOZR8g22CL7BWjJ9BUx1+A/djkofnyW3aOXZORNW2kxoUpx2h+uN2cOqwPmFhnpVmxg+KW2OjOSgChTEvsQ==", + "dev": true + }, + "call-bind": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", + "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "dev": true, + "requires": { + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.1", + "set-function-length": "^1.1.1" + } + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==", + "dev": true + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "check-more-types": { + "version": "2.24.0", + "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", + "integrity": "sha512-Pj779qHxV2tuapviy1bSZNEL1maXr13bPYpsvSDB68HlYcYuhlDrmGd63i0JHMCLKzc7rUSNIrpdJlhVlNwrxA==", + "dev": true + }, + "ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true + }, + "clean-stack": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true + }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } + }, + "cli-table3": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz", + "integrity": "sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==", + "dev": true, + "requires": { + "@colors/colors": "1.5.0", + "string-width": "^4.2.0" + } + }, + "cli-truncate": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "dev": true, + "requires": { + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true + }, + "common-tags": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "requires": { + 
"path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "cypress": { + "version": "12.17.4", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-12.17.4.tgz", + "integrity": "sha512-gAN8Pmns9MA5eCDFSDJXWKUpaL3IDd89N9TtIupjYnzLSmlpVr+ZR+vb4U/qaMp+lB6tBvAmt7504c3Z4RU5KQ==", + "dev": true, + "requires": { + "@cypress/request": "2.88.12", + "@cypress/xvfb": "^1.2.4", + "@types/node": "^16.18.39", + "@types/sinonjs__fake-timers": "8.1.1", + "@types/sizzle": "^2.3.2", + "arch": "^2.2.0", + "blob-util": "^2.0.2", + "bluebird": "^3.7.2", + "buffer": "^5.6.0", + "cachedir": "^2.3.0", + "chalk": "^4.1.0", + "check-more-types": "^2.24.0", + "cli-cursor": "^3.1.0", + "cli-table3": "~0.6.1", + "commander": "^6.2.1", + "common-tags": "^1.8.0", + "dayjs": "^1.10.4", + "debug": "^4.3.4", + "enquirer": "^2.3.6", + "eventemitter2": "6.4.7", + "execa": "4.1.0", + "executable": "^4.1.1", + "extract-zip": "2.0.1", + "figures": "^3.2.0", + "fs-extra": "^9.1.0", + "getos": "^3.2.1", + "is-ci": "^3.0.0", + "is-installed-globally": "~0.4.0", + "lazy-ass": "^1.6.0", + "listr2": "^3.8.3", + "lodash": "^4.17.21", + "log-symbols": "^4.0.0", + "minimist": "^1.2.8", + "ospath": "^1.2.2", + "pretty-bytes": "^5.6.0", + "process": "^0.11.10", + "proxy-from-env": "1.0.0", + "request-progress": "^3.0.0", + "semver": "^7.5.3", + "supports-color": "^8.1.1", + "tmp": "~0.2.1", + "untildify": "^4.0.0", + "yauzl": "^2.10.0" + } + }, + "cypress-axe": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/cypress-axe/-/cypress-axe-1.5.0.tgz", + "integrity": "sha512-Hy/owCjfj+25KMsecvDgo4fC/781ccL+e8p+UUYoadGVM2ogZF9XIKbiM6KI8Y3cEaSreymdD6ZzccbI2bY0lQ==" + }, + "cypress-html-validate": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/cypress-html-validate/-/cypress-html-validate-5.1.2.tgz", + "integrity": "sha512-e8xL6YvnwRwN/1ey0aTQRbsE50rmUtT7WXPnr7ZjAUhTm/kRMKBzxmCZRqiGhy5Q4lOLOHlOkGFg2wCObvENcQ==" + }, + "cypress-recurse": { + "version": "1.35.3", + "resolved": "https://registry.npmjs.org/cypress-recurse/-/cypress-recurse-1.35.3.tgz", + "integrity": "sha512-NbFOpEuZT4tFqAB0jQqel7WtVNDe8pvSHE2TfXvYk4pspf3wq98OC2RhhLn3bMnoCnPtY4IHO7e37c+CZ9HnMA==", + "requires": { + "humanize-duration": "^3.27.3" + } + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } + }, + "dayjs": { + "version": "1.11.10", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.10.tgz", + "integrity": "sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ==", + "dev": true + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + }, + "dependencies": { + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==" + 
}, + "define-data-property": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", + "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "dev": true, + "requires": { + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true + }, + "dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "requires": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + } + }, + "domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" + }, + "domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "requires": { + "domelementtype": "^2.3.0" + } + }, + "domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "requires": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + } + }, + "eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, + "ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", + "dev": true, + "requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "encoding-japanese": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encoding-japanese/-/encoding-japanese-2.0.0.tgz", + "integrity": "sha512-++P0RhebUC8MJAwJOsT93dT+5oc5oPImp1HubZpAuCZ5kTLnhuuBhKHj2jJeO/Gj93idPBWmIuQ9QWMe5rX3pQ==" + }, + "end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "requires": { + "once": 
"^1.4.0" + } + }, + "enquirer": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.4.1.tgz", + "integrity": "sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ==", + "dev": true, + "requires": { + "ansi-colors": "^4.1.1", + "strip-ansi": "^6.0.1" + } + }, + "entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==" + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true + }, + "eslint-visitor-keys": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", + "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", + "dev": true + }, + "espree": { + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.0.tgz", + "integrity": "sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw==", + "dev": true, + "requires": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" + } + }, + "eventemitter2": { + "version": "6.4.7", + "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.7.tgz", + "integrity": "sha512-tYUSVOGeQPKt/eC1ABfhHy5Xd96N3oIijJvN3O9+TsC28T5V9yX9oEfEK5faP0EFSNVOG97qtAS68GBrQB2hDg==", + "dev": true + }, + "execa": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", + "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.0", + "get-stream": "^5.0.0", + "human-signals": "^1.1.1", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.0", + "onetime": "^5.1.0", + "signal-exit": "^3.0.2", + "strip-final-newline": "^2.0.0" + } + }, + "executable": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "dev": true, + "requires": { + "pify": "^2.2.0" + } + }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true + }, + "extract-zip": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", + "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", + "dev": true, + "requires": { + "@types/yauzl": "^2.9.1", + "debug": "^4.1.1", + "get-stream": "^5.1.0", + "yauzl": "^2.10.0" + } + }, + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", + "dev": true + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": 
"sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dev": true, + "requires": { + "pend": "~1.2.0" + } + }, + "figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "foreground-child": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", + "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "dependencies": { + "signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true + } + } + }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", + "dev": true + }, + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "requires": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "dependencies": { + "universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true + } + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true + }, + "get-intrinsic": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", + "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "dev": true, + "requires": { + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + } + }, + "get-stream": { + "version": "5.2.0", + "resolved": 
"https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "requires": { + "pump": "^3.0.0" + } + }, + "getos": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", + "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", + "dev": true, + "requires": { + "async": "^3.2.0" + } + }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } + }, + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "global-dirs": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.1.tgz", + "integrity": "sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==", + "dev": true, + "requires": { + "ini": "2.0.0" + } + }, + "gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dev": true, + "requires": { + "get-intrinsic": "^1.1.3" + } + }, + "graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "has-property-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", + "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "dev": true, + "requires": { + "get-intrinsic": "^1.2.2" + } + }, + "has-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", + "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "dev": true + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true + }, + "hasown": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", + "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "dev": true, + "requires": { + "function-bind": "^1.1.2" + } + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": 
"sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==" + }, + "html-to-text": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/html-to-text/-/html-to-text-9.0.5.tgz", + "integrity": "sha512-qY60FjREgVZL03vJU6IfMV4GDjGBIoOyvuFdpBDIX9yTlDw0TjxVBQp+P8NvpdIXNJvfWBTNul7fsAQJq2FNpg==", + "requires": { + "@selderee/plugin-htmlparser2": "^0.11.0", + "deepmerge": "^4.3.1", + "dom-serializer": "^2.0.0", + "htmlparser2": "^8.0.2", + "selderee": "^0.11.0" + } + }, + "html-validate": { + "version": "7.18.1", + "resolved": "https://registry.npmjs.org/html-validate/-/html-validate-7.18.1.tgz", + "integrity": "sha512-K5jb0h/xAoeR8sJqyR0n/QaKL7rdT88sPCtN+Pvtyn5JUU+nidQe2gBB09WRzPTcQtPXBj4QxBUH5IA2tt8JQg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.10.0", + "@html-validate/stylish": "^4.0.1", + "@sidvind/better-ajv-errors": "^2.0.0", + "acorn-walk": "^8.0.0", + "ajv": "^8.0.0", + "deepmerge": "^4.2.0", + "espree": "^9.0.0", + "glob": "^10.0.0", + "ignore": "^5.0.0", + "kleur": "^4.1.0", + "minimist": "^1.2.0", + "prompts": "^2.0.0", + "semver": "^7.0.0" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "glob": { + "version": "10.3.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.3.tgz", + "integrity": "sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==", + "dev": true, + "requires": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.0.3", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" + } + }, + "minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + } + } + }, + "htmlparser2": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", + "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==", + "requires": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "entities": "^4.4.0" + } + }, + "http-signature": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", + "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^2.0.2", + "sshpk": "^1.14.1" + } + }, + "human-signals": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", + "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", + "dev": true + }, + "humanize-duration": { + "version": "3.32.0", + "resolved": "https://registry.npmjs.org/humanize-duration/-/humanize-duration-3.32.0.tgz", + "integrity": "sha512-6WsXYTHJr7hXKqoqf5zoWza/lANRAqGlbnZnm0cjDykbXuez1JVXOQGmq0EPB45pXYAJyueRA3S3hfhmMbrMEQ==" + }, + "iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": 
"sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + } + }, + "ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true + }, + "ignore": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", + "dev": true + }, + "imap": { + "version": "0.8.19", + "resolved": "https://registry.npmjs.org/imap/-/imap-0.8.19.tgz", + "integrity": "sha512-z5DxEA1uRnZG73UcPA4ES5NSCGnPuuouUx43OPX7KZx1yzq3N8/vx2mtXEShT5inxB3pRgnfG1hijfu7XN2YMw==", + "requires": { + "readable-stream": "1.1.x", + "utf7": ">=1.0.2" + }, + "dependencies": { + "readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + } + } + }, + "imap-simple": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/imap-simple/-/imap-simple-5.1.0.tgz", + "integrity": "sha512-FLZm1v38C5ekN46l/9X5gBRNMQNVc5TSLYQ3Hsq3xBLvKwt1i5fcuShyth8MYMPuvId1R46oaPNrH92hFGHr/g==", + "requires": { + "iconv-lite": "~0.4.13", + "imap": "^0.8.18", + "nodeify": "^1.0.0", + "quoted-printable": "^1.0.0", + "utf8": "^2.1.1", + "uuencode": "0.0.4" + }, + "dependencies": { + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + } + } + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "ini": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "dev": true + }, + "is-ci": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", + "dev": true, + "requires": { + "ci-info": "^3.2.0" + } + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-installed-globally": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", + "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", + "dev": true, + "requires": { + "global-dirs": "^3.0.0", + "is-path-inside": "^3.0.2" + } + }, + "is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true + }, + "is-promise": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-1.0.1.tgz", + "integrity": "sha512-mjWH5XxnhMA8cFnDchr6qRP9S/kLntKuEfIYku+PaN1CnS8v+OG9O/BKpRCVRJvpIkgAZm0Pf5Is3iSSOILlcg==" + }, + "is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", + "dev": true + }, + "is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true + }, + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", + "dev": true + }, + "jackspeak": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.0.tgz", + "integrity": "sha512-uKmsITSsF4rUWQHzqaRUuyAir3fZfW3f202Ee34lz/gZCi970CPZwyQXLGNgWJvvZbvFyzeyGq0+4fcG/mBKZg==", + "dev": true, + "requires": { + "@isaacs/cliui": "^8.0.2", + "@pkgjs/parseargs": "^0.11.0" + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==", + "dev": true + }, + "json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "dev": true + }, + 
"json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true + }, + "jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + }, + "dependencies": { + "universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true + } + } + }, + "jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "requires": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + } + }, + "jsprim": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", + "dev": true, + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + } + }, + "jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "requires": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true + }, + "lazy-ass": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", + "integrity": "sha512-cc8oEVoctTvsFZ/Oje/kGnHbpWHYBe8IAJe4C0QNc3t8uM/0Y8+erSz/7Y1ALuXTEZTMvxXwO6YbX1ey3ujiZw==", + "dev": true + }, + "leac": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/leac/-/leac-0.6.0.tgz", + "integrity": "sha512-y+SqErxb8h7nE/fiEX07jsbuhrpO9lL8eca7/Y1nuWV2moNlXhyd59iDGcRf6moVyDMbmTNzL40SUyrFU/yDpg==" + }, + "libbase64": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/libbase64/-/libbase64-1.2.1.tgz", + "integrity": 
"sha512-l+nePcPbIG1fNlqMzrh68MLkX/gTxk/+vdvAb388Ssi7UuUN31MI44w4Yf33mM3Cm4xDfw48mdf3rkdHszLNew==" + }, + "libmime": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/libmime/-/libmime-5.2.1.tgz", + "integrity": "sha512-A0z9O4+5q+ZTj7QwNe/Juy1KARNb4WaviO4mYeFC4b8dBT2EEqK2pkM+GC8MVnkOjqhl5nYQxRgnPYRRTNmuSQ==", + "requires": { + "encoding-japanese": "2.0.0", + "iconv-lite": "0.6.3", + "libbase64": "1.2.1", + "libqp": "2.0.1" + } + }, + "libqp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/libqp/-/libqp-2.0.1.tgz", + "integrity": "sha512-Ka0eC5LkF3IPNQHJmYBWljJsw0UvM6j+QdKRbWyCdTmYwvIDE6a7bCm0UkTAL/K+3KXK5qXT/ClcInU01OpdLg==" + }, + "linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "requires": { + "uc.micro": "^2.0.0" + } + }, + "listr2": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz", + "integrity": "sha512-TyWI8G99GX9GjE54cJ+RrNMcIFBfwMPxc3XTFiAYGN4s10hWROGtOg7+O6u6LE3mNkyld7RSLE6nrKBvTfcs3g==", + "dev": true, + "requires": { + "cli-truncate": "^2.1.0", + "colorette": "^2.0.16", + "log-update": "^4.0.0", + "p-map": "^4.0.0", + "rfdc": "^1.3.0", + "rxjs": "^7.5.1", + "through": "^2.3.8", + "wrap-ansi": "^7.0.0" + } + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" + }, + "lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" + }, + "lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" + }, + "lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" + }, + "lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" + }, + "lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" + }, + "lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" + }, + "log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": 
"sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "requires": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + } + }, + "log-update": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", + "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", + "dev": true, + "requires": { + "ansi-escapes": "^4.3.0", + "cli-cursor": "^3.1.0", + "slice-ansi": "^4.0.0", + "wrap-ansi": "^6.2.0" + }, + "dependencies": { + "slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + } + }, + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + } + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "mailparser": { + "version": "3.6.6", + "resolved": "https://registry.npmjs.org/mailparser/-/mailparser-3.6.6.tgz", + "integrity": "sha512-noCjBl3FToxmqTP2fp7z17hQsiCroWNntfTd8O+UejOAF59xeN5WGZK27ilexXV2e2X/cbUhG3L8sfEKaz0/sw==", + "requires": { + "encoding-japanese": "2.0.0", + "he": "1.2.0", + "html-to-text": "9.0.5", + "iconv-lite": "0.6.3", + "libmime": "5.2.1", + "linkify-it": "5.0.0", + "mailsplit": "5.4.0", + "nodemailer": "6.9.8", + "tlds": "1.248.0" + }, + "dependencies": { + "nodemailer": { + "version": "6.9.8", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.8.tgz", + "integrity": "sha512-cfrYUk16e67Ks051i4CntM9kshRYei1/o/Gi8K1d+R34OIs21xdFnW7Pt7EucmVKA0LKtqUGNcjMZ7ehjl49mQ==" + } + } + }, + "mailsplit": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/mailsplit/-/mailsplit-5.4.0.tgz", + "integrity": "sha512-wnYxX5D5qymGIPYLwnp6h8n1+6P6vz/MJn5AzGjZ8pwICWssL+CCQjWBIToOVHASmATot4ktvlLo6CyLfOXWYA==", + "requires": { + "libbase64": "1.2.1", + "libmime": "5.2.0", + "libqp": "2.0.1" + }, + "dependencies": { + "libmime": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/libmime/-/libmime-5.2.0.tgz", + "integrity": "sha512-X2U5Wx0YmK0rXFbk67ASMeqYIkZ6E5vY7pNWRKtnNzqjvdYYG8xtPDpCnuUEnPU9vlgNev+JoSrcaKSUaNvfsw==", + "requires": { + "encoding-japanese": "2.0.0", + "iconv-lite": "0.6.3", + "libbase64": "1.2.1", + "libqp": "2.0.1" + } + } + } + }, + "merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true + }, + "mime-types": { + "version": "2.1.35", + 
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "requires": { + "mime-db": "1.52.0" + } + }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true + }, + "minipass": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", + "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", + "dev": true + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "nodeify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/nodeify/-/nodeify-1.0.1.tgz", + "integrity": "sha512-n7C2NyEze8GCo/z73KdbjRsBiLbv6eBn1FxwYKQ23IqGo7pQY3mhQan61Sv7eEDJCiyUjTVrVkXTzJCo1dW7Aw==", + "requires": { + "is-promise": "~1.0.0", + "promise": "~1.3.0" + } + }, + "nodemailer": { + "version": "6.9.13", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.13.tgz", + "integrity": "sha512-7o38Yogx6krdoBf3jCAqnIN4oSQFx+fMa0I7dK1D+me9kBxx12D+/33wSb+fhOCtIxvYJ+4x4IMEhmhCKfAiOA==" + }, + "npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "requires": { + "path-key": "^3.0.0" + } + }, + "object-inspect": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "ospath": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", + "integrity": "sha512-o6E5qJV5zkAbIDNhGSIlyOhScKXgQrSRMilfph0clDfM0nEnBOlKlH4sWDmG95BW/CvwNz0vmm7dJVtU2KlMiA==", + "dev": true + }, + "p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": 
"sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "requires": { + "aggregate-error": "^3.0.0" + } + }, + "parseley": { + "version": "0.12.1", + "resolved": "https://registry.npmjs.org/parseley/-/parseley-0.12.1.tgz", + "integrity": "sha512-e6qHKe3a9HWr0oMRVDTRhKce+bRO8VGQR3NyVwcjwrbhMmFCX9KszEV35+rn4AdilFAq9VPxP/Fe1wC9Qjd2lw==", + "requires": { + "leac": "^0.6.0", + "peberminta": "^0.9.0" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "path-scurry": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz", + "integrity": "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==", + "dev": true, + "requires": { + "lru-cache": "^9.1.1 || ^10.0.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "dependencies": { + "lru-cache": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", + "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", + "dev": true + } + } + }, + "peberminta": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/peberminta/-/peberminta-0.9.0.tgz", + "integrity": "sha512-XIxfHpEuSJbITd1H3EeQwpcZbTLHc+VVr8ANI9t5sit565tsI4/xK3KWTUFE2e6QiangUkh3B0jihzmGnNrRsQ==" + }, + "pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", + "dev": true + }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==", + "dev": true + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true + }, + "pretty-bytes": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", + "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", + "dev": true + }, + "process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", + "dev": true + }, + "promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/promise/-/promise-1.3.0.tgz", + "integrity": "sha512-R9WrbTF3EPkVtWjp7B7umQGVndpsi+rsDAfrR4xAALQpFLa/+2OriecLhawxzvii2gd9+DZFwROWDuUUaqS5yA==", + "requires": { + "is-promise": "~1" + } + }, + "prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": 
"sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "requires": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "dependencies": { + "kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true + } + } + }, + "proxy-from-env": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", + "integrity": "sha512-F2JHgJQ1iqwnHDcQjVBsq3n/uoaFL+iPW/eAeL7kVxy/2RrWaN4WroKjjvbsoRtv0ftelNyC01bjRhn/bhcf4A==", + "dev": true + }, + "psl": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", + "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==", + "dev": true + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + }, + "qs": { + "version": "6.10.4", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.4.tgz", + "integrity": "sha512-OQiU+C+Ds5qiH91qh/mg0w+8nwQuLjM4F4M/PbmhDOoYehPh+Fb0bDjtR1sOvy7YKxvj28Y/M0PhP5uVX0kB+g==", + "dev": true, + "requires": { + "side-channel": "^1.0.4" + } + }, + "querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true + }, + "quoted-printable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/quoted-printable/-/quoted-printable-1.0.1.tgz", + "integrity": "sha512-cihC68OcGiQOjGiXuo5Jk6XHANTHl1K4JLk/xlEJRTIXfy19Sg6XzB95XonYgr+1rB88bCpr7WZE7D7AlZow4g==", + "requires": { + "utf8": "^2.1.0" + } + }, + "request-progress": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", + "integrity": "sha512-MnWzEHHaxHO2iWiQuHrUPBi/1WeBf5PkxQqNyNvLl9VAYSdXkP8tQ3pBSeCPD+yw0v0Aq1zosWLz0BdeXpWwZg==", + "dev": true, + "requires": { + "throttleit": "^1.0.0" + } + }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, + "requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", + "dev": true + }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "requires": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + } + }, + "rfdc": { + "version": "1.3.1", + "resolved": 
"https://registry.npmjs.org/rfdc/-/rfdc-1.3.1.tgz", + "integrity": "sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==", + "dev": true + }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dev": true, + "requires": { + "tslib": "^2.1.0" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "selderee": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/selderee/-/selderee-0.11.0.tgz", + "integrity": "sha512-5TF+l7p4+OsnP8BCCvSyZiSPc4x4//p5uPwK8TCnVPJYRmU2aYKMpOXvw8zM5a5JvuuCGN1jmsMwuU2W02ukfA==", + "requires": { + "parseley": "^0.12.0" + } + }, + "semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "set-function-length": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.0.tgz", + "integrity": "sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w==", + "dev": true, + "requires": { + "define-data-property": "^1.1.1", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.2", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.1" + } + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": 
"sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true + }, + "slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + } + }, + "sshpk": { + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.18.0.tgz", + "integrity": "sha512-2p2KJZTSqQ/I3+HX42EpYOa2l3f8Erv8MWKsy2I9uf4wA7yFIkXRffYdsx86y6z4vHtV8u7g+pPlr8/4ouAxsQ==", + "dev": true, + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + } + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "throttleit": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.1.tgz", + "integrity": "sha512-vDZpf9Chs9mAdfY046mcPt8fg5QSZr37hEH4TXYBnDF+izxgrbRGUAAaBvIk/fJm9aOFCGFd1EsNg5AZCbnQCQ==", + "dev": true + }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true + }, + "tlds": { + "version": "1.248.0", + "resolved": "https://registry.npmjs.org/tlds/-/tlds-1.248.0.tgz", + "integrity": "sha512-noj0KdpWTBhwsKxMOXk0rN9otg4kTgLm4WohERRHbJ9IY+kSDKr3RmjitaQ3JFzny+DyvBOQKlFZhp0G0qNSfg==" + }, + "tmp": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "dev": true, + "requires": { + "rimraf": "^3.0.0" + } + }, + "tough-cookie": { + "version": "4.1.3", + "resolved": 
"https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", + "dev": true, + "requires": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + } + }, + "tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "dev": true + }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "dev": true, + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", + "dev": true + }, + "type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true + }, + "uc.micro": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.0.0.tgz", + "integrity": "sha512-DffL94LsNOccVn4hyfRe5rdKa273swqeA5DJpMOeFmEn1wCDc7nAbbB0gXlgBCL7TNzeTv6G7XVWzan7iJtfig==" + }, + "universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true + }, + "untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "dev": true + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "requires": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, + "utf7": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/utf7/-/utf7-1.0.2.tgz", + "integrity": "sha512-qQrPtYLLLl12NF4DrM9CvfkxkYI97xOb5dsnGZHE3teFr0tWiEZ9UdgMPczv24vl708cYMpe6mGXGHrotIp3Bw==", + "requires": { + "semver": "~5.3.0" + }, + "dependencies": { + "semver": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", + "integrity": "sha512-mfmm3/H9+67MCVix1h+IXTpDwL6710LyHuk7+cWC9T1mE0qz4iHhh6r4hU2wrIT9iTsAAC2XQRvfblL028cpLw==" + } + } + }, + "utf8": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/utf8/-/utf8-2.1.2.tgz", + "integrity": "sha512-QXo+O/QkLP/x1nyi54uQiG0XrODxdysuQvE5dtVqv7F5K2Qb6FsN+qbr6KhF5wQ20tfcV3VQp0/2x1e1MRSPWg==" + }, + "uuencode": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/uuencode/-/uuencode-0.0.4.tgz", + 
"integrity": "sha512-yEEhCuCi5wRV7Z5ZVf9iV2gWMvUZqKJhAs1ecFdKJ0qzbyaVelmsE3QjYAamehfp9FKLiZbKldd+jklG3O0LfA==" + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true + }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dev": true, + "requires": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + } + } +} diff --git a/tests_cypress/package.json b/tests_cypress/package.json new file mode 100644 index 0000000000..a5786bc135 --- /dev/null +++ b/tests_cypress/package.json @@ -0,0 +1,21 @@ +{ + "scripts": { + "cypress": "ETHEREAL_CACHE=false npx cypress open", + "smoke": "ETHEREAL_CACHE=false npx cypress run --spec cypress/e2e/smoke/smoke.cy.js", + "api": "ETHEREAL_CACHE=false npx cypress run --spec 'cypress/e2e/api/all.cy.js'" + }, + "dependencies": { + "cypress-axe": "^1.4.0", + "cypress-html-validate": "^5.1.2", + "cypress-recurse": "^1.23.0", + "imap-simple": "^5.1.0", + "jsonwebtoken": "^9.0.0", + "mailparser": "^3.5.0", + "nodemailer": "^6.9.2" + }, + "devDependencies": { + "axe-core": "^4.7.2", + "cypress": "^12.16.0", + "html-validate": "^7.7.1" + } +} diff --git a/tests_smoke/README.md b/tests_smoke/README.md new file mode 100644 index 0000000000..531de7879a --- /dev/null +++ b/tests_smoke/README.md @@ -0,0 +1,14 @@ +# Smoke Tests + +This repository contains a set of smoke tests for our application. Smoke testing, also known as "Build Verification Testing", is a type of software testing that comprises of a non-exhaustive set of tests that aim at ensuring that the most important functions work. The phrase 'smoke testing' comes from the hardware testing, where you plug in a new piece of hardware and turn it on for the first time. If it starts smoking, you know you have a problem. 
+ +## Getting Started + +These smoke tests are designed to run in the api devcontainer. + +In the root of the repo, create `.env` files for the environments you wish to smoke test, for example `.env_smoke_local`, `.env_smoke_staging`, and `.env_smoke_prod`. For required values, see the [.env.example](.env.example) file. + +## Running the tests + +In the devcontainer, use the aliases `smoke-local`, `smoke-staging`, or `smoke-prod` to run the tests against the corresponding environment. + diff --git a/tests_smoke/__init__.py b/tests_smoke/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests_smoke/send_many.py b/tests_smoke/send_many.py new file mode 100644 index 0000000000..62adca2ecd --- /dev/null +++ b/tests_smoke/send_many.py @@ -0,0 +1,72 @@ +import argparse +import time +from datetime import datetime + +import requests +from dotenv import load_dotenv +from smoke.common import ( # type: ignore + Config, + Notification_type, + create_jwt_token, + job_line, + rows_to_csv, + s3upload, + set_metadata_on_csv_upload, +) + +DEFAULT_JOB_SIZE = 50000 + + +def send_admin_csv(notification_type: Notification_type, job_size: int): + """Send a bulk job of notifications by uploading a CSV + + Args: + notification_type (Notification_type): email or sms + job_size (int): number of notifications to send + """ + + template_id = Config.EMAIL_TEMPLATE_ID if notification_type == Notification_type.EMAIL else Config.SMS_TEMPLATE_ID + to = Config.EMAIL_TO if notification_type == Notification_type.EMAIL else Config.SMS_TO + header = "email address" if notification_type == Notification_type.EMAIL else "phone number" + + csv = rows_to_csv([[header, "var"], *job_line(to, job_size)]) + upload_id = s3upload(Config.SERVICE_ID, csv) + metadata_kwargs = { + "notification_count": 1, + "template_id": template_id, + "valid": True, + "original_file_name": f"Large send {datetime.utcnow().isoformat()}.csv", + } + set_metadata_on_csv_upload(Config.SERVICE_ID, upload_id, **metadata_kwargs) + + token = create_jwt_token(Config.ADMIN_CLIENT_SECRET, client_id=Config.ADMIN_CLIENT_USER_NAME) + response = requests.post( + f"{Config.API_HOST_NAME}/service/{Config.SERVICE_ID}/job", + json={"id": upload_id, "created_by": Config.USER_ID}, + headers={"Authorization": f"Bearer {token}"}, + ) + if response.status_code != 201: + print(response.json()) + print("FAILED: post to start send failed") + exit(1) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-n", "--notifications", default=1, type=int, help="total number of notifications") + parser.add_argument("-j", "--job_size", default=DEFAULT_JOB_SIZE, type=int, help=f"size of bulk send jobs (default {DEFAULT_JOB_SIZE})") + parser.add_argument("--sms", default=False, action='store_true', help="send sms instead of emails") + + args = parser.parse_args() + load_dotenv() + + notification_type = Notification_type.SMS if args.sms else Notification_type.EMAIL + for start_n in range(0, args.notifications, args.job_size): + num_sending = min(args.notifications - start_n, args.job_size) + print(f"Sending {start_n} - {start_n + num_sending - 1} of {args.notifications}") + send_admin_csv(notification_type, num_sending) + time.sleep(1) + + +if __name__ == "__main__": + main() diff --git a/tests_smoke/smoke/common.py b/tests_smoke/smoke/common.py index 4639549e7c..f248bc8926 100644 --- a/tests_smoke/smoke/common.py +++ b/tests_smoke/smoke/common.py @@ -2,8 +2,6 @@ import json import os import time - -# from notifications_utils.s3 import s3upload as utils_s3upload import urllib import uuid 
from enum import Enum @@ -16,29 +14,37 @@ from dotenv import load_dotenv from notifications_python_client.authentication import create_jwt_token +# from app/config.py +INTERNAL_TEST_NUMBER = "+16135550123" +INTERNAL_TEST_EMAIL_ADDRESS = "internal.test@cds-snc.ca" + load_dotenv() class Config: - API_HOST_NAME = os.environ.get("SMOKE_API_HOST_NAME") - EMAIL_TO = os.environ.get("SMOKE_EMAIL_TO", "") - SMS_TO = os.environ.get("SMOKE_SMS_TO", "") + API_HOST_NAME = os.environ.get("SMOKE_API_HOST_NAME", "http://localhost:6011") + IS_LOCAL = "localhost" in API_HOST_NAME + ADMIN_CLIENT_USER_NAME = "notify-admin" + ADMIN_CLIENT_SECRET = os.environ.get("SMOKE_ADMIN_CLIENT_SECRET", "local_app") + POLL_TIMEOUT = int(os.environ.get("SMOKE_POLL_TIMEOUT", 120)) + AWS_REGION = "ca-central-1" + CSV_UPLOAD_BUCKET_NAME = os.environ.get("SMOKE_CSV_UPLOAD_BUCKET_NAME", "notification-canada-ca-staging-csv-upload") + + AWS_ACCESS_KEY_ID = os.environ.get("SMOKE_AWS_ACCESS_KEY_ID") + AWS_SECRET_ACCESS_KEY = os.environ.get("SMOKE_AWS_SECRET_ACCESS_KEY") SERVICE_ID = os.environ.get("SMOKE_SERVICE_ID", "") + USER_ID = os.environ.get("SMOKE_USER_ID") + EMAIL_TO = os.environ.get("SMOKE_EMAIL_TO", INTERNAL_TEST_EMAIL_ADDRESS) + SMS_TO = os.environ.get("SMOKE_SMS_TO", INTERNAL_TEST_NUMBER) EMAIL_TEMPLATE_ID = os.environ.get("SMOKE_EMAIL_TEMPLATE_ID") SMS_TEMPLATE_ID = os.environ.get("SMOKE_SMS_TEMPLATE_ID") - - USER_ID = os.environ.get("SMOKE_USER_ID") - AWS_REGION = "ca-central-1" - CSV_UPLOAD_BUCKET_NAME = os.environ.get("SMOKE_CSV_UPLOAD_BUCKET_NAME") - ADMIN_CLIENT_USER_NAME = "notify-admin" - ADMIN_CLIENT_SECRET = os.environ.get("SMOKE_ADMIN_CLIENT_SECRET") API_KEY = os.environ.get("SMOKE_API_KEY", "") - POLL_TIMEOUT = int(os.environ.get("SMOKE_POLL_TIMEOUT", 20)) + JOB_SIZE = int(os.environ.get("SMOKE_JOB_SIZE", 2)) boto_session = Session( - aws_access_key_id=os.environ.get("SMOKE_AWS_ACCESS_KEY_ID"), - aws_secret_access_key=os.environ.get("SMOKE_AWS_SECRET_ACCESS_KEY"), + aws_access_key_id=Config.AWS_ACCESS_KEY_ID, + aws_secret_access_key=Config.AWS_SECRET_ACCESS_KEY, ) @@ -60,8 +66,8 @@ def rows_to_csv(rows: List[List[str]]): return output.getvalue() -def job_line(data: str, number_of_lines: int) -> Iterator[List[str]]: - return map(lambda n: [data, f"var{n}"], range(0, number_of_lines)) +def job_line(data: str, number_of_lines: int, prefix: str = "") -> Iterator[List[str]]: + return map(lambda n: [data, f"{prefix} {n}"], range(0, number_of_lines)) def pretty_print(data: Any): @@ -76,14 +82,14 @@ def single_succeeded(uri: str, use_jwt: bool) -> bool: token = create_jwt_token(Config.ADMIN_CLIENT_SECRET, client_id=Config.ADMIN_CLIENT_USER_NAME) headers = {"Authorization": f"Bearer {token}"} else: - headers = {"Authorization": f"ApiKey-v1 {Config.API_KEY[-36:]}"} + headers = {"Authorization": f"ApiKey-v1 {Config.API_KEY}"} response = requests.get( uri, headers=headers, ) body = response.json() - success = body.get("status") == "delivered" + success = body.get("status") == "delivered" or (Config.IS_LOCAL and "fail" not in body.get("status", "")) failure = body.get("status") == "permanent-failure" if success or failure: break @@ -103,7 +109,7 @@ def job_succeeded(service_id: str, job_id: str) -> bool: data = response.json()["data"] if data["job_status"] != "finished": next - success = all([stat["status"] == "delivered" for stat in data["statistics"]]) + success = all([stat["status"] == "delivered" for stat in data["statistics"]]) or (Config.IS_LOCAL and all(["fail" not in stat["status"] for stat in 
data["statistics"]])) failure = any([stat["status"] == "permanent-failure" for stat in data["statistics"]]) if success or failure: break @@ -113,7 +119,6 @@ def job_succeeded(service_id: str, job_id: str) -> bool: return success -# from notifications_utils.s3 import s3upload as utils_s3upload def utils_s3upload(filedata, region, bucket_name, file_location, content_type="binary/octet-stream", tags=None): _s3 = boto_session.resource("s3") diff --git a/tests_smoke/smoke/test_admin_csv.py b/tests_smoke/smoke/test_admin_csv.py index fc2d035a0a..e6f962266f 100644 --- a/tests_smoke/smoke/test_admin_csv.py +++ b/tests_smoke/smoke/test_admin_csv.py @@ -13,13 +13,13 @@ ) -def test_admin_csv(notification_type: Notification_type): +def test_admin_csv(notification_type: Notification_type, local: bool = False): print(f"test_admin_csv ({notification_type.value})... ", end="", flush=True) if notification_type == Notification_type.EMAIL: - data = rows_to_csv([["email address", "var"], *job_line(Config.EMAIL_TO, 2)]) + data = rows_to_csv([["email address", "var"], *job_line(Config.EMAIL_TO, Config.JOB_SIZE, prefix="smoke test admin csv")]) else: - data = rows_to_csv([["phone number", "var"], *job_line(Config.SMS_TO, 2)]) + data = rows_to_csv([["phone number", "var"], *job_line(Config.SMS_TO, Config.JOB_SIZE, prefix="smoke test admin csv")]) upload_id = s3upload(Config.SERVICE_ID, data) metadata_kwargs = { @@ -42,8 +42,11 @@ def test_admin_csv(notification_type: Notification_type): print("FAILED: post to send_notification failed") exit(1) - success = job_succeeded(Config.SERVICE_ID, upload_id) - if not success: - print("FAILED: job didn't finish successfully") - exit(1) - print("Success") + if local: + print(f"Check manually for {Config.JOB_SIZE} {notification_type.value}s") + else: + success = job_succeeded(Config.SERVICE_ID, upload_id) + if not success: + print("FAILED: job didn't finish successfully") + exit(1) + print("Success") diff --git a/tests_smoke/smoke/test_admin_one_off.py b/tests_smoke/smoke/test_admin_one_off.py index 8d52ea55a6..faaee84c92 100644 --- a/tests_smoke/smoke/test_admin_one_off.py +++ b/tests_smoke/smoke/test_admin_one_off.py @@ -4,7 +4,7 @@ from .common import Config, Notification_type, pretty_print, single_succeeded -def test_admin_one_off(notification_type: Notification_type): +def test_admin_one_off(notification_type: Notification_type, local: bool = False): print(f"test_admin_one_off ({notification_type.value})... 
", end="", flush=True) token = create_jwt_token(Config.ADMIN_CLIENT_SECRET, client_id=Config.ADMIN_CLIENT_USER_NAME) @@ -17,7 +17,7 @@ def test_admin_one_off(notification_type: Notification_type): "to": to, "template_id": template_id, "created_by": Config.USER_ID, - "personalisation": {"var": "var"}, + "personalisation": {"var": "smoke test admin one off"}, }, headers={"Authorization": f"Bearer {token}"}, ) @@ -28,9 +28,12 @@ def test_admin_one_off(notification_type: Notification_type): print("FAILED: post to send_notification failed") exit(1) - uri = f"{Config.API_HOST_NAME}/service/{Config.SERVICE_ID}/notifications/{body['id']}" - success = single_succeeded(uri, use_jwt=True) - if not success: - print("FAILED: job didn't finish successfully") - exit(1) - print("Success") + if local: + print(f"Check manually for 1 {notification_type.value}") + else: + uri = f"{Config.API_HOST_NAME}/service/{Config.SERVICE_ID}/notifications/{body['id']}" + success = single_succeeded(uri, use_jwt=True) + if not success: + print("FAILED: job didn't finish successfully") + exit(1) + print("Success") diff --git a/tests_smoke/smoke/test_api_bulk.py b/tests_smoke/smoke/test_api_bulk.py index 1817a2b552..91897770cf 100644 --- a/tests_smoke/smoke/test_api_bulk.py +++ b/tests_smoke/smoke/test_api_bulk.py @@ -12,7 +12,7 @@ ) -def test_api_bulk(notification_type: Notification_type): +def test_api_bulk(notification_type: Notification_type, local: bool = False): print(f"test_api_bulk ({notification_type.value})... ", end="", flush=True) template_id = Config.EMAIL_TEMPLATE_ID if notification_type == Notification_type.EMAIL else Config.SMS_TEMPLATE_ID to = Config.EMAIL_TO if notification_type == Notification_type.EMAIL else Config.SMS_TO @@ -23,17 +23,20 @@ def test_api_bulk(notification_type: Notification_type): json={ "name": f"My bulk name {datetime.utcnow().isoformat()}", "template_id": template_id, - "csv": rows_to_csv([[header, "var"], *job_line(to, 2)]), + "csv": rows_to_csv([[header, "var"], *job_line(to, Config.JOB_SIZE, prefix="smoke test api bulk")]), }, - headers={"Authorization": f"ApiKey-v1 {Config.API_KEY[-36:]}"}, + headers={"Authorization": f"ApiKey-v1 {Config.API_KEY}"}, ) if response.status_code != 201: pretty_print(response.json()) print("FAILED: post failed") exit(1) - success = job_succeeded(Config.SERVICE_ID, response.json()["data"]["id"]) - if not success: - print("FAILED: job didn't finish successfully") - exit(1) - print("Success") + if local: + print(f"Check manually for {Config.JOB_SIZE} {notification_type.value}s") + else: + success = job_succeeded(Config.SERVICE_ID, response.json()["data"]["id"]) + if not success: + print("FAILED: job didn't finish successfully") + exit(1) + print("Success") diff --git a/tests_smoke/smoke/test_api_one_off.py b/tests_smoke/smoke/test_api_one_off.py index 1c19ec1018..d4ab8e470a 100644 --- a/tests_smoke/smoke/test_api_one_off.py +++ b/tests_smoke/smoke/test_api_one_off.py @@ -11,7 +11,7 @@ ) -def test_api_one_off(notification_type: Notification_type, attachment_type: Attachment_type = Attachment_type.NONE): +def test_api_one_off(notification_type: Notification_type, attachment_type: Attachment_type = Attachment_type.NONE, local: bool = False): if attachment_type is Attachment_type.NONE: print(f"test_api_oneoff ({notification_type.value})... 
", end="", flush=True) else: @@ -51,23 +51,25 @@ def test_api_one_off(notification_type: Notification_type, attachment_type: Atta } else: data["personalisation"] = { - "var": "var", + "var": "smoke test api one off", } response = requests.post( f"{Config.API_HOST_NAME}/v2/notifications/{notification_type.value}", json=data, - headers={"Authorization": f"ApiKey-v1 {Config.API_KEY[-36:]}"}, + headers={"Authorization": f"ApiKey-v1 {Config.API_KEY}"}, ) if response.status_code != 201: pretty_print(response.json()) print(f"FAILED: post to v2/notifications/{notification_type.value} failed") exit(1) - uri = response.json()["uri"] - - success = single_succeeded(uri, use_jwt=False) - if not success: - print("FAILED: job didn't finish successfully") - exit(1) - print("Success") + if local: + print(f"Check manually for 1 {notification_type.value}") + else: + uri = response.json()["uri"] + success = single_succeeded(uri, use_jwt=False) + if not success: + print("FAILED: job didn't finish successfully") + exit(1) + print("Success") diff --git a/tests_smoke/smoke_test.py b/tests_smoke/smoke_test.py index 2b9fc2399b..ccde49ddef 100644 --- a/tests_smoke/smoke_test.py +++ b/tests_smoke/smoke_test.py @@ -1,3 +1,5 @@ +import argparse + from smoke.common import Attachment_type, Config, Notification_type # type: ignore from smoke.test_admin_csv import test_admin_csv # type: ignore from smoke.test_admin_one_off import test_admin_one_off # type: ignore @@ -5,15 +7,22 @@ from smoke.test_api_one_off import test_api_one_off # type: ignore if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("-l", "--local", default=False, action='store_true', help="run locally, do not check for delivery success (default false)") + parser.add_argument("--nofiles", default=False, action='store_true', help="do not send files (default false)") + args = parser.parse_args() + print("API Smoke test\n") for key in ["API_HOST_NAME", "SERVICE_ID", "EMAIL_TEMPLATE_ID", "SMS_TEMPLATE_ID", "EMAIL_TO", "SMS_TO"]: print(f"{key:>17}: {Config.__dict__[key]}") print("") for notification_type in [Notification_type.EMAIL, Notification_type.SMS]: - test_admin_one_off(notification_type) - test_admin_csv(notification_type) - test_api_one_off(notification_type) - test_api_bulk(notification_type) - test_api_one_off(Notification_type.EMAIL, Attachment_type.ATTACHED) - test_api_one_off(Notification_type.EMAIL, Attachment_type.LINK) + test_admin_one_off(notification_type, local=args.local) + test_admin_csv(notification_type, local=args.local) + test_api_one_off(notification_type, local=args.local) + test_api_bulk(notification_type, local=args.local) + + if not args.nofiles: + test_api_one_off(Notification_type.EMAIL, attachment_type=Attachment_type.ATTACHED, local=args.local) + test_api_one_off(Notification_type.EMAIL, attachment_type=Attachment_type.LINK, local=args.local)