diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml
index 5ff8c67f88..89d32b59b0 100644
--- a/.github/workflows/docker.yaml
+++ b/.github/workflows/docker.yaml
@@ -10,6 +10,7 @@ env:
DOCKER_SLUG: public.ecr.aws/v6b8u5o6/notify-api
KUBECTL_VERSION: '1.23.6'
WORKFLOW_PAT: ${{ secrets.WORKFLOW_GITHUB_PAT }}
+ OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
permissions:
id-token: write # This is required for requesting the OIDC JWT
@@ -55,9 +56,51 @@ jobs:
run: |
docker push $DOCKER_SLUG:latest && docker push $DOCKER_SLUG:${GITHUB_SHA::7}
- - name: Rollout in Kubernetes
+ - name: Configure AWS credentials
+ uses: aws-actions/configure-aws-credentials@5fd3084fc36e372ff1fff382a39b10d03659f355 # v2.2.0
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: ca-central-1
+
+ - name: Install OpenVPN
+ run: |
+ sudo apt update
+ sudo apt install -y openvpn openvpn-systemd-resolved
+
+ - name: Install 1Pass CLI
+ run: |
+ curl -o 1pass.deb https://downloads.1password.com/linux/debian/amd64/stable/1password-cli-amd64-latest.deb
+ sudo dpkg -i 1pass.deb
+
+ - name: One Password Fetch
run: |
- ./scripts/callManifestsRollout.sh ${GITHUB_SHA::7}
+ op read op://4eyyuwddp6w4vxlabrr2i2duxm/"Staging Github Actions VPN"/notesPlain > /var/tmp/staging.ovpn
+
+ - name: Connect to VPN
+ uses: "kota65535/github-openvpn-connect-action@cd2ed8a90cc7b060dc4e001143e811b5f7ea0af5"
+ with:
+ config_file: /var/tmp/staging.ovpn
+ client_key: ${{ secrets.STAGING_OVPN_CLIENT_KEY }}
+ echo_config: false
+
+ - name: Configure kubeconfig
+ run: |
+ aws eks update-kubeconfig --name notification-canada-ca-staging-eks-cluster
+
+ - name: Update images in staging
+ run: |
+ DOCKER_TAG=${GITHUB_SHA::7}
+ kubectl set image deployment.apps/api api=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
+ kubectl set image deployment.apps/celery-beat celery-beat=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
+ kubectl set image deployment.apps/celery-sms celery-sms=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
+ kubectl set image deployment.apps/celery-primary celery-primary=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
+ kubectl set image deployment.apps/celery-scalable celery-scalable=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
+ kubectl set image deployment.apps/celery-sms-send-primary celery-sms-send-primary=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
+ kubectl set image deployment.apps/celery-sms-send-scalable celery-sms-send-scalable=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
+ kubectl set image deployment.apps/celery-email-send-primary celery-email-send-primary=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
+ kubectl set image deployment.apps/celery-email-send-scalable celery-email-send-scalable=$DOCKER_SLUG:$DOCKER_TAG -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
+
- name: my-app-install token
id: notify-pr-bot
diff --git a/.github/workflows/export_github_data.yml b/.github/workflows/export_github_data.yml
index 51ccbcb18b..db92588487 100644
--- a/.github/workflows/export_github_data.yml
+++ b/.github/workflows/export_github_data.yml
@@ -14,7 +14,7 @@ jobs:
DNS_PROXY_FORWARDTOSENTINEL: "true"
DNS_PROXY_LOGANALYTICSWORKSPACEID: ${{ secrets.LOG_ANALYTICS_WORKSPACE_ID }}
DNS_PROXY_LOGANALYTICSSHAREDKEY: ${{ secrets.LOG_ANALYTICS_WORKSPACE_KEY }}
- - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Export Data
uses: cds-snc/github-repository-metadata-exporter@main
with:
diff --git a/.github/workflows/ossf-scorecard.yml b/.github/workflows/ossf-scorecard.yml
index dd97d6ea44..bad8cdd45c 100644
--- a/.github/workflows/ossf-scorecard.yml
+++ b/.github/workflows/ossf-scorecard.yml
@@ -20,12 +20,12 @@ jobs:
steps:
- name: "Checkout code"
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
persist-credentials: false
- name: "Run analysis"
- uses: ossf/scorecard-action@7699f539c2b9ff754039f0e173fdf1a4e4a1e143
+ uses: ossf/scorecard-action@8c9e2c1222f54716a1df7d7bbb245e2a045b4423
with:
results_file: ossf-results.json
results_format: json
diff --git a/.github/workflows/s3-backup.yml b/.github/workflows/s3-backup.yml
index b19055191c..6a8e9670d1 100644
--- a/.github/workflows/s3-backup.yml
+++ b/.github/workflows/s3-backup.yml
@@ -10,7 +10,7 @@ jobs:
steps:
- name: Checkout
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0 # retrieve all history
diff --git a/app/__init__.py b/app/__init__.py
index c3c144620e..f831441372 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -153,10 +153,16 @@ def create_app(application, config=None):
# Log the application configuration
application.logger.info(f"Notify config: {config.get_safe_config()}")
- # avoid circular imports by importing this file later
- from app.commands import setup_commands
+ # avoid circular imports by importing these files later
+ from app.commands.bulk_db import setup_bulk_db_commands
+ from app.commands.deprecated import setup_deprecated_commands
+ from app.commands.support import setup_support_commands
+ from app.commands.test_data import setup_test_data_commands
- setup_commands(application)
+ setup_support_commands(application)
+ setup_bulk_db_commands(application)
+ setup_test_data_commands(application)
+ setup_deprecated_commands(application)
return application
@@ -201,6 +207,7 @@ def register_blueprint(application):
from app.service.rest import service_blueprint
from app.status.healthcheck import status as status_blueprint
from app.template.rest import template_blueprint
+ from app.template.template_category_rest import template_category_blueprint
from app.template_folder.rest import template_folder_blueprint
from app.template_statistics.rest import (
template_statistics as template_statistics_blueprint,
@@ -259,6 +266,8 @@ def register_blueprint(application):
register_notify_blueprint(application, letter_branding_blueprint, requires_admin_auth)
+ register_notify_blueprint(application, template_category_blueprint, requires_admin_auth)
+
def register_v2_blueprints(application):
from app.authentication.auth import requires_auth
diff --git a/app/clients/freshdesk.py b/app/clients/freshdesk.py
index fd0ecc978b..785096af5a 100644
--- a/app/clients/freshdesk.py
+++ b/app/clients/freshdesk.py
@@ -77,7 +77,20 @@ def _generate_description(self):
f"- Texte alternatif français : {self.contact.alt_text_fr}",
]
)
-
+ elif self.contact.is_new_template_category_request():
+            message = "\n".join(
+ [
+ f"New template category request from {self.contact.name} ({self.contact.email_address}):",
+ f"- Service id: {self.contact.service_id}",
+ f"- New Template Category Request name: {self.contact.template_category_name_en}",
+ f"- Template id request: {self.contact.template_id_link}",
+                "\n",
+ f"Demande de nouvelle catégorie de modèle de {self.contact.name} ({self.contact.email_address}):",
+ f"- Identifiant du service: {self.contact.service_id}",
+ f"- Nom de la nouvelle catégorie de modèle demandée: {self.contact.template_category_name_fr}",
+ f"- Demande d'identifiant de modèle: {self.contact.template_id_link}",
+ ]
+ )
if len(self.contact.user_profile):
             message += f"\n---\n{self.contact.user_profile}"
diff --git a/app/clients/sms/__init__.py b/app/clients/sms/__init__.py
index 8d6472d19d..88ab822075 100644
--- a/app/clients/sms/__init__.py
+++ b/app/clients/sms/__init__.py
@@ -1,6 +1,13 @@
+from enum import Enum
+
from app.clients import Client, ClientException
+class SmsSendingVehicles(Enum):
+ SHORT_CODE = "short_code"
+ LONG_CODE = "long_code"
+
+
class SmsClientResponseException(ClientException):
"""
Base Exception for SmsClientsResponses
diff --git a/app/clients/sms/aws_pinpoint.py b/app/clients/sms/aws_pinpoint.py
index bdb3ba7fa7..57c58c9f13 100644
--- a/app/clients/sms/aws_pinpoint.py
+++ b/app/clients/sms/aws_pinpoint.py
@@ -3,7 +3,7 @@
import boto3
import phonenumbers
-from app.clients.sms import SmsClient
+from app.clients.sms import SmsClient, SmsSendingVehicles
class AwsPinpointClient(SmsClient):
@@ -21,38 +21,66 @@ def init_app(self, current_app, statsd_client, *args, **kwargs):
def get_name(self):
return self.name
- def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None):
+ def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None, service_id=None, sending_vehicle=None):
messageType = "TRANSACTIONAL"
matched = False
+ opted_out = False
+ response = {}
- if template_id is not None and str(template_id) in self.current_app.config["AWS_PINPOINT_SC_TEMPLATE_IDS"]:
+ if self.current_app.config["FF_TEMPLATE_CATEGORY"]:
+ use_shortcode_pool = (
+ sending_vehicle == SmsSendingVehicles.SHORT_CODE
+ or str(service_id) == self.current_app.config["NOTIFY_SERVICE_ID"]
+ )
+ else:
+ use_shortcode_pool = (
+ str(template_id) in self.current_app.config["AWS_PINPOINT_SC_TEMPLATE_IDS"]
+ or str(service_id) == self.current_app.config["NOTIFY_SERVICE_ID"]
+ )
+ if use_shortcode_pool:
pool_id = self.current_app.config["AWS_PINPOINT_SC_POOL_ID"]
else:
pool_id = self.current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"]
for match in phonenumbers.PhoneNumberMatcher(to, "US"):
matched = True
+ opted_out = False
to = phonenumbers.format_number(match.number, phonenumbers.PhoneNumberFormat.E164)
destinationNumber = to
-
try:
start_time = monotonic()
- response = self._client.send_text_message(
- DestinationPhoneNumber=destinationNumber,
- OriginationIdentity=pool_id,
- MessageBody=content,
- MessageType=messageType,
- ConfigurationSetName=self.current_app.config["AWS_PINPOINT_CONFIGURATION_SET_NAME"],
- )
+ # For international numbers we send with an AWS number for the corresponding country, using our default sender id.
+ # Note that Canada does not currently support sender ids.
+ if phonenumbers.region_code_for_number(match.number) != "CA":
+ response = self._client.send_text_message(
+ DestinationPhoneNumber=destinationNumber,
+ MessageBody=content,
+ MessageType=messageType,
+ ConfigurationSetName=self.current_app.config["AWS_PINPOINT_CONFIGURATION_SET_NAME"],
+ )
+ else:
+ response = self._client.send_text_message(
+ DestinationPhoneNumber=destinationNumber,
+ OriginationIdentity=pool_id,
+ MessageBody=content,
+ MessageType=messageType,
+ ConfigurationSetName=self.current_app.config["AWS_PINPOINT_CONFIGURATION_SET_NAME"],
+ )
+ except self._client.exceptions.ConflictException as e:
+ if e.response.get("Reason") == "DESTINATION_PHONE_NUMBER_OPTED_OUT":
+ opted_out = True
+ else:
+ raise e
+
except Exception as e:
self.statsd_client.incr("clients.pinpoint.error")
- raise Exception(e)
+ raise e
finally:
elapsed_time = monotonic() - start_time
self.current_app.logger.info("AWS Pinpoint request finished in {}".format(elapsed_time))
self.statsd_client.timing("clients.pinpoint.request-time", elapsed_time)
self.statsd_client.incr("clients.pinpoint.success")
- return response["MessageId"]
+ return "opted_out" if opted_out else response.get("MessageId")
if not matched:
self.statsd_client.incr("clients.pinpoint.error")
diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py
index 4847754d72..d949de4c6d 100644
--- a/app/clients/sms/aws_sns.py
+++ b/app/clients/sms/aws_sns.py
@@ -26,7 +26,7 @@ def get_name(self):
return self.name
@statsd(namespace="clients.sns")
- def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None):
+ def send_sms(self, to, content, reference, multi=True, sender=None, template_id=None, service_id=None, sending_vehicle=None):
matched = False
for match in phonenumbers.PhoneNumberMatcher(to, "US"):
diff --git a/app/commands/README.md b/app/commands/README.md
new file mode 100644
index 0000000000..8ab4ae4cd6
--- /dev/null
+++ b/app/commands/README.md
@@ -0,0 +1,15 @@
+# Flask commands
+
+Changes to the database outside Notify admin/api should be done via flask commands versus `psql` or a database client.
+
+Flask commands must be run on a server that has the api repository installed and has access to the database, for example an api or celery pod in the kubernetes cluster.
+
+Commands are run by specifying the group and command. You must also have the environment variable `FLASK_APP` set correctly. For example
+```
+FLASK_APP=application flask support list-routes
+```
+
+We currently have 4 groups of commands available: `support`, `bulk-db`, `test-data`, and `deprecated`. To see what commands are available for a group run a command such as
+```
+FLASK_APP=application flask support
+```
diff --git a/app/commands/__init__.py b/app/commands/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/app/commands/bulk_db.py b/app/commands/bulk_db.py
new file mode 100644
index 0000000000..94b29f8e82
--- /dev/null
+++ b/app/commands/bulk_db.py
@@ -0,0 +1,320 @@
+import functools
+import itertools
+import uuid
+from datetime import datetime, timedelta
+from decimal import Decimal
+
+import click
+from click_datetime import Datetime as click_dt
+from flask import cli as flask_cli
+from flask import current_app, json
+from notifications_utils.statsd_decorators import statsd
+from sqlalchemy.exc import IntegrityError
+
+from app import db
+from app.dao.annual_billing_dao import dao_create_or_update_annual_billing_for_year
+from app.dao.organisation_dao import (
+ dao_add_service_to_organisation,
+ dao_get_organisation_by_email_address,
+)
+from app.dao.provider_rates_dao import (
+ create_provider_rates as dao_create_provider_rates,
+)
+from app.dao.services_dao import dao_fetch_service_by_id
+from app.models import (
+ PROVIDERS,
+ Domain,
+ EmailBranding,
+ LetterBranding,
+ Organisation,
+ Service,
+ User,
+)
+
+
+@click.group(name="bulk-db", help="Bulk updates to the database")
+def bulk_db_group():
+ pass
+
+
+class bulk_db_command:
+ def __init__(self, name=None):
+ self.name = name
+
+ def __call__(self, func):
+ # we need to call the flask with_appcontext decorator to ensure the config is loaded, db connected etc etc.
+ # we also need to use functools.wraps to carry through the names and docstrings etc of the functions.
+ # Then we need to turn it into a click.Command - that's what command_group.add_command expects.
+ @click.command(name=self.name)
+ @functools.wraps(func)
+ @flask_cli.with_appcontext
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+
+ bulk_db_group.add_command(wrapper)
+
+ return wrapper
+
+
+def setup_bulk_db_commands(application):
+ application.cli.add_command(bulk_db_group)
+
+
+@bulk_db_command()
+@click.option("-p", "--provider_name", required=True, type=click.Choice(PROVIDERS))
+@click.option(
+ "-c",
+ "--cost",
+ required=True,
+ help="Cost (pence) per message including decimals",
+ type=float,
+)
+@click.option("-d", "--valid_from", required=True, type=click_dt(format="%Y-%m-%dT%H:%M:%S"))
+def create_provider_rates(provider_name, cost, valid_from):
+ """
+ Backfill rates for a given provider
+ """
+ cost = Decimal(cost)
+ dao_create_provider_rates(provider_name, valid_from, cost)
+
+
+@bulk_db_command(name="populate-annual-billing")
+@click.option(
+ "-y",
+ "--year",
+ required=True,
+ type=int,
+ help="""The year to populate the annual billing data for, i.e. 2019""",
+)
+def populate_annual_billing(year):
+ """
+ add annual_billing for given year.
+ """
+ sql = """
+ Select id from services where active = true
+ except
+ select service_id
+ from annual_billing
+ where financial_year_start = :year
+ """
+ services_without_annual_billing = db.session.execute(sql, {"year": year})
+ for row in services_without_annual_billing:
+ latest_annual_billing = """
+ Select free_sms_fragment_limit
+ from annual_billing
+ where service_id = :service_id
+ order by financial_year_start desc limit 1
+ """
+ free_allowance_rows = db.session.execute(latest_annual_billing, {"service_id": row.id})
+ free_allowance = [x[0] for x in free_allowance_rows]
+ print("create free limit of {} for service: {}".format(free_allowance[0], row.id))
+ dao_create_or_update_annual_billing_for_year(
+ service_id=row.id,
+ free_sms_fragment_limit=free_allowance[0],
+ financial_year_start=int(year),
+ )
+
+
+@bulk_db_command(name="insert-inbound-numbers")
+@click.option(
+ "-f",
+ "--file_name",
+ required=True,
+    help="""Full path of the file to upload, the file contains inbound numbers,
+ one number per line. The number must have the format of 07... not 447....""",
+)
+def insert_inbound_numbers_from_file(file_name):
+ print("Inserting inbound numbers from {}".format(file_name))
+ file = open(file_name)
+ sql = "insert into inbound_numbers values('{}', '{}', 'mmg', null, True, now(), null);"
+
+ for line in file:
+ print(line)
+ db.session.execute(sql.format(uuid.uuid4(), line.strip()))
+ db.session.commit()
+ file.close()
+
+
+@bulk_db_command(name="bulk-invite-user-to-service")
+@click.option(
+ "-f",
+ "--file_name",
+ required=True,
+ help="Full path of the file containing a list of email address for people to invite to a service",
+)
+@click.option(
+ "-s",
+ "--service_id",
+ required=True,
+ help="The id of the service that the invite is for",
+)
+@click.option("-u", "--user_id", required=True, help="The id of the user that the invite is from")
+@click.option(
+ "-a",
+ "--auth_type",
+ required=False,
+ help="The authentication type for the user, sms_auth or email_auth. Defaults to sms_auth if not provided",
+)
+@click.option("-p", "--permissions", required=True, help="Comma separated list of permissions.")
+def bulk_invite_user_to_service(file_name, service_id, user_id, auth_type, permissions):
+ # permissions
+ # manage_users | manage_templates | manage_settings
+ # send messages ==> send_texts | send_emails | send_letters
+ # Access API keys manage_api_keys
+ # platform_admin
+ # view_activity
+ # "send_texts,send_emails,send_letters,view_activity"
+ from app.invite.rest import create_invited_user
+
+ file = open(file_name)
+ for email_address in file:
+ data = {
+ "service": service_id,
+ "email_address": email_address.strip(),
+ "from_user": user_id,
+ "permissions": permissions,
+ "auth_type": auth_type,
+ "invite_link_host": current_app.config["ADMIN_BASE_URL"],
+ }
+ with current_app.test_request_context(
+ path="/service/{}/invite/".format(service_id),
+ method="POST",
+ data=json.dumps(data),
+ headers={"Content-Type": "application/json"},
+ ):
+ try:
+ response = create_invited_user(service_id)
+ if response[1] != 201:
+ print("*** ERROR occurred for email address: {}".format(email_address.strip()))
+ print(response[0].get_data(as_text=True))
+ except Exception as e:
+ print("*** ERROR occurred for email address: {}. \n{}".format(email_address.strip(), e))
+
+ file.close()
+
+
+@bulk_db_command(name="archive-jobs-created-between-dates")
+@click.option(
+ "-s",
+ "--start_date",
+ required=True,
+ help="start date inclusive",
+ type=click_dt(format="%Y-%m-%d"),
+)
+@click.option(
+ "-e",
+ "--end_date",
+ required=True,
+ help="end date inclusive",
+ type=click_dt(format="%Y-%m-%d"),
+)
+@statsd(namespace="tasks")
+def update_jobs_archived_flag(start_date, end_date):
+ current_app.logger.info("Archiving jobs created between {} to {}".format(start_date, end_date))
+
+ process_date = start_date
+ total_updated = 0
+
+ while process_date < end_date:
+ start_time = datetime.utcnow()
+ sql = """update
+ jobs set archived = true
+ where
+ created_at >= (date :start + time '00:00:00') at time zone 'America/Toronto'
+ at time zone 'UTC'
+ and created_at < (date :end + time '00:00:00') at time zone 'America/Toronto' at time zone 'UTC'"""
+
+ result = db.session.execute(sql, {"start": process_date, "end": process_date + timedelta(days=1)})
+ db.session.commit()
+ current_app.logger.info(
+ "jobs: --- Completed took {}ms. Archived {} jobs for {}".format(
+ datetime.now() - start_time, result.rowcount, process_date
+ )
+ )
+
+ process_date += timedelta(days=1)
+
+ total_updated += result.rowcount
+ current_app.logger.info("Total archived jobs = {}".format(total_updated))
+
+
+@bulk_db_command(name="populate-organisations-from-file")
+@click.option(
+ "-f",
+ "--file_name",
+ required=True,
+    help="Pipe delimited file containing organisation name, sector, crown, agreement_signed, domains",
+)
+def populate_organisations_from_file(file_name):
+ # [0] organisation name:: name of the organisation insert if organisation is missing.
+ # [1] sector:: Central | Local | NHS only
+ # [2] crown:: TRUE | FALSE only
+    # [3] agreement_signed:: TRUE | FALSE
+ # [4] domains:: comma separated list of domains related to the organisation
+ # [5] email branding name: name of the default email branding for the org
+ # [6] letter branding name: name of the default letter branding for the org
+
+ # The expectation is that the organisation, organisation_to_service
+ # and user_to_organisation will be cleared before running this command.
+ # Ignoring duplicates allows us to run the command again with the same file or same file with new rows.
+ with open(file_name, "r") as f:
+
+ def boolean_or_none(field):
+ if field == "1":
+ return True
+ elif field == "0":
+ return False
+ else:
+ return None
+
+ for line in itertools.islice(f, 1, None):
+ columns = line.split("|")
+ print(columns)
+ email_branding = None
+ email_branding_column = columns[5].strip()
+ if len(email_branding_column) > 0:
+ email_branding = EmailBranding.query.filter(EmailBranding.name == email_branding_column).one()
+ letter_branding = None
+ letter_branding_column = columns[6].strip()
+ if len(letter_branding_column) > 0:
+ letter_branding = LetterBranding.query.filter(LetterBranding.name == letter_branding_column).one()
+ data = {
+ "name": columns[0],
+ "active": True,
+ "agreement_signed": boolean_or_none(columns[3]),
+ "crown": boolean_or_none(columns[2]),
+ "organisation_type": columns[1].lower(),
+ "email_branding_id": email_branding.id if email_branding else None,
+ "letter_branding_id": letter_branding.id if letter_branding else None,
+ }
+ org = Organisation(**data)
+ try:
+ db.session.add(org)
+ db.session.commit()
+ except IntegrityError:
+ print("duplicate org", org.name)
+ db.session.rollback()
+ domains = columns[4].split(",")
+ for d in domains:
+ if len(d.strip()) > 0:
+ domain = Domain(domain=d.strip(), organisation_id=org.id)
+ try:
+ db.session.add(domain)
+ db.session.commit()
+ except IntegrityError:
+ print("duplicate domain", d.strip())
+ db.session.rollback()
+
+
+@bulk_db_command(name="associate-services-to-organisations")
+def associate_services_to_organisations():
+ services = Service.get_history_model().query.filter_by(version=1).all()
+
+ for s in services:
+ created_by_user = User.query.filter_by(id=s.created_by_id).first()
+ organisation = dao_get_organisation_by_email_address(created_by_user.email_address)
+ service = dao_fetch_service_by_id(service_id=s.id)
+ if organisation:
+ dao_add_service_to_organisation(service=service, organisation_id=organisation.id)
+
+ print("finished associating services to organisations")
diff --git a/app/commands.py b/app/commands/deprecated.py
similarity index 61%
rename from app/commands.py
rename to app/commands/deprecated.py
index 84b5ad829d..3aab168fe4 100644
--- a/app/commands.py
+++ b/app/commands/deprecated.py
@@ -1,9 +1,7 @@
import csv
import functools
import itertools
-import uuid
from datetime import datetime, timedelta
-from decimal import Decimal
import click
from click_datetime import Datetime as click_dt
@@ -11,71 +9,37 @@
from flask import current_app, json
from notifications_utils.statsd_decorators import statsd
from notifications_utils.template import SMSMessageTemplate
-from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
-from app import DATETIME_FORMAT, db, signer_delivery_status
+from app import db
from app.aws import s3
from app.celery.letters_pdf_tasks import create_letters_pdf
from app.celery.nightly_tasks import (
send_total_sent_notifications_to_performance_platform,
)
-from app.celery.service_callback_tasks import send_delivery_status_to_service
from app.config import QueueNames
-from app.dao.annual_billing_dao import dao_create_or_update_annual_billing_for_year
from app.dao.fact_billing_dao import (
delete_billing_data_for_service_for_day,
fetch_billing_data_for_day,
get_service_ids_that_need_billing_populated,
update_fact_billing,
)
-from app.dao.organisation_dao import (
- dao_add_service_to_organisation,
- dao_get_organisation_by_email_address,
-)
-from app.dao.provider_rates_dao import (
- create_provider_rates as dao_create_provider_rates,
-)
-from app.dao.service_callback_api_dao import (
- get_service_delivery_status_callback_api_for_service,
-)
-from app.dao.services_dao import (
- dao_fetch_all_services_by_user,
- dao_fetch_service_by_id,
- dao_update_service,
- delete_service_and_all_associated_db_objects,
-)
+from app.dao.services_dao import dao_fetch_service_by_id, dao_update_service
from app.dao.templates_dao import dao_get_template_by_id
-from app.dao.users_dao import (
- delete_model_user,
- delete_user_verify_codes,
- get_user_by_email,
-)
-from app.models import (
- KEY_TYPE_TEST,
- NOTIFICATION_CREATED,
- PROVIDERS,
- SMS_TYPE,
- Domain,
- EmailBranding,
- LetterBranding,
- Notification,
- Organisation,
- Service,
- User,
-)
+from app.dao.users_dao import get_user_by_email
+from app.models import KEY_TYPE_TEST, NOTIFICATION_CREATED, SMS_TYPE, Notification
from app.performance_platform.processing_time import (
send_processing_time_for_start_and_end,
)
from app.utils import get_local_timezone_midnight_in_utc, get_midnight_for_day_before
-@click.group(name="command", help="Additional commands")
-def command_group():
+@click.group(name="deprecated", help="Deprecated commands")
+def deprecated_group():
pass
-class notify_command:
+class deprecated_command:
def __init__(self, name=None):
self.name = name
@@ -89,83 +53,16 @@ def __call__(self, func):
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
- command_group.add_command(wrapper)
+ deprecated_group.add_command(wrapper)
return wrapper
-@notify_command()
-@click.option("-p", "--provider_name", required=True, type=click.Choice(PROVIDERS))
-@click.option(
- "-c",
- "--cost",
- required=True,
- help="Cost (pence) per message including decimals",
- type=float,
-)
-@click.option("-d", "--valid_from", required=True, type=click_dt(format="%Y-%m-%dT%H:%M:%S"))
-def create_provider_rates(provider_name, cost, valid_from):
- """
- Backfill rates for a given provider
- """
- cost = Decimal(cost)
- dao_create_provider_rates(provider_name, valid_from, cost)
-
-
-@notify_command()
-@click.option(
- "-u",
- "--user_email_prefix",
- required=True,
- help="""
- Functional test user email prefix. eg "notify-test-preview"
-""",
-) # noqa
-def purge_functional_test_data(user_email_prefix):
- """
- Remove non-seeded functional test data
-
- users, services, etc. Give an email prefix. Probably "notify-test-preview".
- """
- users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all()
- for usr in users:
- # Make sure the full email includes a uuid in it
- # Just in case someone decides to use a similar email address.
- try:
- uuid.UUID(usr.email_address.split("@")[0].split("+")[1])
- except ValueError:
- print("Skipping {} as the user email doesn't contain a UUID.".format(usr.email_address))
- else:
- services = dao_fetch_all_services_by_user(usr.id)
- if services:
- for service in services:
- delete_service_and_all_associated_db_objects(service)
- else:
- delete_user_verify_codes(usr)
- delete_model_user(usr)
-
-
-@notify_command()
-def backfill_notification_statuses():
- """
- DEPRECATED. Populates notification_status.
-
- This will be used to populate the new `Notification._status_fkey` with the old
- `Notification._status_enum`
- """
- LIMIT = 250000
- subq = "SELECT id FROM notification_history WHERE notification_status is NULL LIMIT {}".format(LIMIT)
- update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq)
- result = db.session.execute(subq).fetchall()
-
- while len(result) > 0:
- db.session.execute(update)
- print("commit {} updates at {}".format(LIMIT, datetime.utcnow()))
- db.session.commit()
- result = db.session.execute(subq).fetchall()
+def setup_deprecated_commands(application):
+ application.cli.add_command(deprecated_group)
-@notify_command()
+@deprecated_command()
def update_notification_international_flag():
"""
DEPRECATED. Set notifications.international=false.
@@ -192,7 +89,7 @@ def update_notification_international_flag():
result_history = db.session.execute(subq_history).fetchall()
-@notify_command()
+@deprecated_command()
def fix_notification_statuses_not_in_sync():
"""
DEPRECATED.
@@ -225,7 +122,27 @@ def fix_notification_statuses_not_in_sync():
result = db.session.execute(subq_hist).fetchall()
-@notify_command()
+@deprecated_command()
+def backfill_notification_statuses():
+ """
+ DEPRECATED. Populates notification_status.
+
+ This will be used to populate the new `Notification._status_fkey` with the old
+ `Notification._status_enum`
+ """
+ LIMIT = 250000
+ subq = "SELECT id FROM notification_history WHERE notification_status is NULL LIMIT {}".format(LIMIT)
+ update = "UPDATE notification_history SET notification_status = status WHERE id in ({})".format(subq)
+ result = db.session.execute(subq).fetchall()
+
+ while len(result) > 0:
+ db.session.execute(update)
+ print("commit {} updates at {}".format(LIMIT, datetime.utcnow()))
+ db.session.commit()
+ result = db.session.execute(subq).fetchall()
+
+
+@deprecated_command()
@click.option(
"-s",
"--start_date",
@@ -260,7 +177,7 @@ def backfill_performance_platform_totals(start_date, end_date):
send_total_sent_notifications_to_performance_platform(process_date)
-@notify_command()
+@deprecated_command()
@click.option(
"-s",
"--start_date",
@@ -300,71 +217,7 @@ def backfill_processing_time(start_date, end_date):
send_processing_time_for_start_and_end(process_start_date, process_end_date)
-@notify_command(name="populate-annual-billing")
-@click.option(
- "-y",
- "--year",
- required=True,
- type=int,
- help="""The year to populate the annual billing data for, i.e. 2019""",
-)
-def populate_annual_billing(year):
- """
- add annual_billing for given year.
- """
- sql = """
- Select id from services where active = true
- except
- select service_id
- from annual_billing
- where financial_year_start = :year
- """
- services_without_annual_billing = db.session.execute(sql, {"year": year})
- for row in services_without_annual_billing:
- latest_annual_billing = """
- Select free_sms_fragment_limit
- from annual_billing
- where service_id = :service_id
- order by financial_year_start desc limit 1
- """
- free_allowance_rows = db.session.execute(latest_annual_billing, {"service_id": row.id})
- free_allowance = [x[0] for x in free_allowance_rows]
- print("create free limit of {} for service: {}".format(free_allowance[0], row.id))
- dao_create_or_update_annual_billing_for_year(
- service_id=row.id,
- free_sms_fragment_limit=free_allowance[0],
- financial_year_start=int(year),
- )
-
-
-@notify_command(name="list-routes")
-def list_routes():
- """List URLs of all application routes."""
- for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
- print("{:10} {}".format(", ".join(rule.methods - set(["OPTIONS", "HEAD"])), rule.rule))
-
-
-@notify_command(name="insert-inbound-numbers")
-@click.option(
- "-f",
- "--file_name",
- required=True,
- help="""Full path of the file to upload, file is a contains inbound numbers,
- one number per line. The number must have the format of 07... not 447....""",
-)
-def insert_inbound_numbers_from_file(file_name):
- print("Inserting inbound numbers from {}".format(file_name))
- file = open(file_name)
- sql = "insert into inbound_numbers values('{}', '{}', 'mmg', null, True, now(), null);"
-
- for line in file:
- print(line)
- db.session.execute(sql.format(uuid.uuid4(), line.strip()))
- db.session.commit()
- file.close()
-
-
-@notify_command(name="replay-create-pdf-letters")
+@deprecated_command(name="replay-create-pdf-letters")
@click.option(
"-n",
"--notification_id",
@@ -377,71 +230,7 @@ def replay_create_pdf_letters(notification_id):
create_letters_pdf.apply_async([str(notification_id)], queue=QueueNames.CREATE_LETTERS_PDF)
-@notify_command(name="replay-service-callbacks")
-@click.option(
- "-f",
- "--file_name",
- required=True,
- help="""Full path of the file to upload, file is a contains client references of
- notifications that need the status to be sent to the service.""",
-)
-@click.option(
- "-s",
- "--service_id",
- required=True,
- help="""The service that the callbacks are for""",
-)
-def replay_service_callbacks(file_name, service_id):
- print("Start send service callbacks for service: ", service_id)
- callback_api = get_service_delivery_status_callback_api_for_service(service_id=service_id)
- if not callback_api:
- print("Callback api was not found for service: {}".format(service_id))
- return
-
- errors = []
- notifications = []
- file = open(file_name)
-
- for ref in file:
- try:
- notification = Notification.query.filter_by(client_reference=ref.strip()).one()
- notifications.append(notification)
- except NoResultFound:
- errors.append("Reference: {} was not found in notifications.".format(ref))
-
- for e in errors:
- print(e)
- if errors:
- raise Exception("Some notifications for the given references were not found")
-
- for n in notifications:
- data = {
- "notification_id": str(n.id),
- "notification_client_reference": n.client_reference,
- "notification_to": n.to,
- "notification_status": n.status,
- "notification_created_at": n.created_at.strftime(DATETIME_FORMAT),
- "notification_updated_at": n.updated_at.strftime(DATETIME_FORMAT),
- "notification_sent_at": n.sent_at.strftime(DATETIME_FORMAT),
- "notification_type": n.notification_type,
- "service_callback_api_url": callback_api.url,
- "service_callback_api_bearer_token": callback_api.bearer_token,
- }
- signed_status_update = signer_delivery_status.sign(data)
- send_delivery_status_to_service.apply_async([str(n.id), signed_status_update], queue=QueueNames.CALLBACKS)
-
- print(
- "Replay service status for service: {}. Sent {} notification status updates to the queue".format(
- service_id, len(notifications)
- )
- )
-
-
-def setup_commands(application):
- application.cli.add_command(command_group)
-
-
-@notify_command(name="migrate-data-to-ft-billing")
+@deprecated_command(name="migrate-data-to-ft-billing")
@click.option(
"-s",
"--start_date",
@@ -535,7 +324,7 @@ def migrate_data_to_ft_billing(start_date, end_date):
current_app.logger.info("Total inserted/updated records = {}".format(total_updated))
-@notify_command(name="rebuild-ft-billing-for-day")
+@deprecated_command(name="rebuild-ft-billing-for-day")
@click.option("-s", "--service_id", required=False, type=click.UUID)
@click.option(
"-d",
@@ -572,7 +361,7 @@ def rebuild_ft_data(process_day, service):
rebuild_ft_data(day, row.service_id)
-@notify_command(name="migrate-data-to-ft-notification-status")
+@deprecated_command(name="migrate-data-to-ft-notification-status")
@click.option(
"-s",
"--start_date",
@@ -635,65 +424,7 @@ def migrate_data_to_ft_notification_status(start_date, end_date):
print("Total inserted/updated records = {}".format(total_updated))
-@notify_command(name="bulk-invite-user-to-service")
-@click.option(
- "-f",
- "--file_name",
- required=True,
- help="Full path of the file containing a list of email address for people to invite to a service",
-)
-@click.option(
- "-s",
- "--service_id",
- required=True,
- help="The id of the service that the invite is for",
-)
-@click.option("-u", "--user_id", required=True, help="The id of the user that the invite is from")
-@click.option(
- "-a",
- "--auth_type",
- required=False,
- help="The authentication type for the user, sms_auth or email_auth. Defaults to sms_auth if not provided",
-)
-@click.option("-p", "--permissions", required=True, help="Comma separated list of permissions.")
-def bulk_invite_user_to_service(file_name, service_id, user_id, auth_type, permissions):
- # permissions
- # manage_users | manage_templates | manage_settings
- # send messages ==> send_texts | send_emails | send_letters
- # Access API keys manage_api_keys
- # platform_admin
- # view_activity
- # "send_texts,send_emails,send_letters,view_activity"
- from app.invite.rest import create_invited_user
-
- file = open(file_name)
- for email_address in file:
- data = {
- "service": service_id,
- "email_address": email_address.strip(),
- "from_user": user_id,
- "permissions": permissions,
- "auth_type": auth_type,
- "invite_link_host": current_app.config["ADMIN_BASE_URL"],
- }
- with current_app.test_request_context(
- path="/service/{}/invite/".format(service_id),
- method="POST",
- data=json.dumps(data),
- headers={"Content-Type": "application/json"},
- ):
- try:
- response = create_invited_user(service_id)
- if response[1] != 201:
- print("*** ERROR occurred for email address: {}".format(email_address.strip()))
- print(response[0].get_data(as_text=True))
- except Exception as e:
- print("*** ERROR occurred for email address: {}. \n{}".format(email_address.strip(), e))
-
- file.close()
-
-
-@notify_command(name="populate-notification-postage")
+@deprecated_command(name="populate-notification-postage")
@click.option(
"-s",
"--start_date",
@@ -744,52 +475,7 @@ def populate_notification_postage(start_date):
current_app.logger.info("Total inserted/updated records = {}".format(total_updated))
-@notify_command(name="archive-jobs-created-between-dates")
-@click.option(
- "-s",
- "--start_date",
- required=True,
- help="start date inclusive",
- type=click_dt(format="%Y-%m-%d"),
-)
-@click.option(
- "-e",
- "--end_date",
- required=True,
- help="end date inclusive",
- type=click_dt(format="%Y-%m-%d"),
-)
-@statsd(namespace="tasks")
-def update_jobs_archived_flag(start_date, end_date):
- current_app.logger.info("Archiving jobs created between {} to {}".format(start_date, end_date))
-
- process_date = start_date
- total_updated = 0
-
- while process_date < end_date:
- start_time = datetime.utcnow()
- sql = """update
- jobs set archived = true
- where
- created_at >= (date :start + time '00:00:00') at time zone 'America/Toronto'
- at time zone 'UTC'
- and created_at < (date :end + time '00:00:00') at time zone 'America/Toronto' at time zone 'UTC'"""
-
- result = db.session.execute(sql, {"start": process_date, "end": process_date + timedelta(days=1)})
- db.session.commit()
- current_app.logger.info(
- "jobs: --- Completed took {}ms. Archived {} jobs for {}".format(
- datetime.now() - start_time, result.rowcount, process_date
- )
- )
-
- process_date += timedelta(days=1)
-
- total_updated += result.rowcount
- current_app.logger.info("Total archived jobs = {}".format(total_updated))
-
-
-@notify_command(name="update-emails-to-remove-gsi")
+@deprecated_command(name="update-emails-to-remove-gsi")
@click.option(
"-s",
"--service_id",
@@ -821,7 +507,7 @@ def update_emails_to_remove_gsi(service_id):
db.session.commit()
-@notify_command(name="replay-daily-sorted-count-files")
+@deprecated_command(name="replay-daily-sorted-count-files")
@click.option(
"-f",
"--file_extension",
@@ -839,75 +525,7 @@ def replay_daily_sorted_count_files(file_extension):
print("Create task to record daily sorted counts for file: ", filename)
-@notify_command(name="populate-organisations-from-file")
-@click.option(
- "-f",
- "--file_name",
- required=True,
- help="Pipe delimited file containing organisation name, sector, crown, argeement_signed, domains",
-)
-def populate_organisations_from_file(file_name):
- # [0] organisation name:: name of the organisation insert if organisation is missing.
- # [1] sector:: Central | Local | NHS only
- # [2] crown:: TRUE | FALSE only
- # [3] argeement_signed:: TRUE | FALSE
- # [4] domains:: comma separated list of domains related to the organisation
- # [5] email branding name: name of the default email branding for the org
- # [6] letter branding name: name of the default letter branding for the org
-
- # The expectation is that the organisation, organisation_to_service
- # and user_to_organisation will be cleared before running this command.
- # Ignoring duplicates allows us to run the command again with the same file or same file with new rows.
- with open(file_name, "r") as f:
-
- def boolean_or_none(field):
- if field == "1":
- return True
- elif field == "0":
- return False
- elif field == "":
- return None
-
- for line in itertools.islice(f, 1, None):
- columns = line.split("|")
- print(columns)
- email_branding = None
- email_branding_column = columns[5].strip()
- if len(email_branding_column) > 0:
- email_branding = EmailBranding.query.filter(EmailBranding.name == email_branding_column).one()
- letter_branding = None
- letter_branding_column = columns[6].strip()
- if len(letter_branding_column) > 0:
- letter_branding = LetterBranding.query.filter(LetterBranding.name == letter_branding_column).one()
- data = {
- "name": columns[0],
- "active": True,
- "agreement_signed": boolean_or_none(columns[3]),
- "crown": boolean_or_none(columns[2]),
- "organisation_type": columns[1].lower(),
- "email_branding_id": email_branding.id if email_branding else None,
- "letter_branding_id": letter_branding.id if letter_branding else None,
- }
- org = Organisation(**data)
- try:
- db.session.add(org)
- db.session.commit()
- except IntegrityError:
- print("duplicate org", org.name)
- db.session.rollback()
- domains = columns[4].split(",")
- for d in domains:
- if len(d.strip()) > 0:
- domain = Domain(domain=d.strip(), organisation_id=org.id)
- try:
- db.session.add(domain)
- db.session.commit()
- except IntegrityError:
- print("duplicate domain", d.strip())
- db.session.rollback()
-
-
-@notify_command(name="get-letter-details-from-zips-sent-file")
+@deprecated_command(name="get-letter-details-from-zips-sent-file")
@click.argument("file_paths", required=True, nargs=-1)
@statsd(namespace="tasks")
def get_letter_details_from_zips_sent_file(file_paths):
@@ -943,21 +561,7 @@ def get_letter_details_from_zips_sent_file(file_paths):
csv_writer.writerow(row)
-@notify_command(name="associate-services-to-organisations")
-def associate_services_to_organisations():
- services = Service.get_history_model().query.filter_by(version=1).all()
-
- for s in services:
- created_by_user = User.query.filter_by(id=s.created_by_id).first()
- organisation = dao_get_organisation_by_email_address(created_by_user.email_address)
- service = dao_fetch_service_by_id(service_id=s.id)
- if organisation:
- dao_add_service_to_organisation(service=service, organisation_id=organisation.id)
-
- print("finished associating services to organisations")
-
-
-@notify_command(name="populate-service-volume-intentions")
+@deprecated_command(name="populate-service-volume-intentions")
@click.option(
"-f",
"--file_name",
@@ -982,7 +586,7 @@ def populate_service_volume_intentions(file_name):
print("populate-service-volume-intentions complete")
-@notify_command(name="populate-go-live")
+@deprecated_command(name="populate-go-live")
@click.option("-f", "--file_name", required=True, help="CSV file containing live service data")
def populate_go_live(file_name):
# 0 - count, 1- Link, 2- Service ID, 3- DEPT, 4- Service Name, 5- Main contact,
@@ -1021,7 +625,7 @@ def populate_go_live(file_name):
dao_update_service(service)
-@notify_command(name="fix-billable-units")
+@deprecated_command(name="fix-billable-units")
def fix_billable_units():
query = Notification.query.filter(
Notification.notification_type == SMS_TYPE,
diff --git a/app/commands/support.py b/app/commands/support.py
new file mode 100644
index 0000000000..f48444b07a
--- /dev/null
+++ b/app/commands/support.py
@@ -0,0 +1,126 @@
+import functools
+
+import click
+from flask import cli as flask_cli
+from flask import current_app
+from sqlalchemy.orm.exc import NoResultFound
+
+from app import DATETIME_FORMAT, db, signer_delivery_status
+from app.celery.service_callback_tasks import send_delivery_status_to_service
+from app.config import QueueNames
+from app.dao.service_callback_api_dao import (
+ get_service_delivery_status_callback_api_for_service,
+)
+from app.models import Notification, User
+
+
+@click.group(name="support", help="Support commands")
+def support_group():
+ pass
+
+
+class support_command:
+ def __init__(self, name=None):
+ self.name = name
+
+ def __call__(self, func):
+ # we need to call the flask with_appcontext decorator to ensure the config is loaded, db connected etc etc.
+ # we also need to use functools.wraps to carry through the names and docstrings etc of the functions.
+ # Then we need to turn it into a click.Command - that's what command_group.add_command expects.
+ @click.command(name=self.name)
+ @functools.wraps(func)
+ @flask_cli.with_appcontext
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+
+ support_group.add_command(wrapper)
+
+ return wrapper
+
+
+def setup_support_commands(application):
+ application.cli.add_command(support_group)
+
+
+@support_command(name="admin")
+@click.option("-u", "--user_email", required=True, help="user email address")
+@click.option("--on/--off", required=False, default=True, show_default="on", help="set admin on or off")
+def toggle_admin(user_email, on):
+ """
+ Set a user to be a platform admin or not
+ """
+ try:
+ user = User.query.filter(User.email_address == user_email).one()
+ except NoResultFound:
+ print(f"User {user_email} not found")
+ return
+ user.platform_admin = on
+ db.session.commit()
+ print(f"User {user.email_address} is now {'an admin' if user.platform_admin else 'not an admin'}")
+
+
+@support_command(name="list-routes")
+def list_routes():
+ """List URLs of all application routes."""
+ for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
+ print("{:10} {}".format(", ".join(rule.methods - set(["OPTIONS", "HEAD"])), rule.rule))
+
+
+@support_command(name="replay-service-callbacks")
+@click.option(
+ "-f",
+ "--file_name",
+ required=True,
+ help="""Full path of the file to upload, file that contains client references of
+ notifications that need the status to be sent to the service.""",
+)
+@click.option(
+ "-s",
+ "--service_id",
+ required=True,
+ help="""The service that the callbacks are for""",
+)
+def replay_service_callbacks(file_name, service_id):
+ print("Start send service callbacks for service: ", service_id)
+ callback_api = get_service_delivery_status_callback_api_for_service(service_id=service_id)
+ if not callback_api:
+ print("Callback api was not found for service: {}".format(service_id))
+ return
+
+ errors = []
+ notifications = []
+ file = open(file_name)
+
+ for ref in file:
+ try:
+ notification = Notification.query.filter_by(client_reference=ref.strip()).one()
+ notifications.append(notification)
+ except NoResultFound:
+ errors.append("Reference: {} was not found in notifications.".format(ref))
+
+ for e in errors:
+ print(e)
+ if errors:
+ raise Exception("Some notifications for the given references were not found")
+
+ for n in notifications:
+ data = {
+ "notification_id": str(n.id),
+ "notification_client_reference": n.client_reference,
+ "notification_to": n.to,
+ "notification_status": n.status,
+ "notification_created_at": n.created_at.strftime(DATETIME_FORMAT),
+ "notification_updated_at": n.updated_at.strftime(DATETIME_FORMAT),
+ "notification_sent_at": n.sent_at.strftime(DATETIME_FORMAT),
+ "notification_type": n.notification_type,
+ "service_callback_api_url": callback_api.url,
+ "service_callback_api_bearer_token": callback_api.bearer_token,
+ }
+ signed_status_update = signer_delivery_status.sign(data)
+ send_delivery_status_to_service.apply_async([str(n.id), signed_status_update], queue=QueueNames.CALLBACKS)
+
+ print(
+ "Replay service status for service: {}. Sent {} notification status updates to the queue".format(
+ service_id, len(notifications)
+ )
+ )
diff --git a/app/commands/test_data.py b/app/commands/test_data.py
new file mode 100644
index 0000000000..929c3a006a
--- /dev/null
+++ b/app/commands/test_data.py
@@ -0,0 +1,73 @@
+import functools
+import uuid
+
+import click
+from flask import cli as flask_cli
+
+from app.dao.services_dao import (
+ dao_fetch_all_services_by_user,
+ delete_service_and_all_associated_db_objects,
+)
+from app.dao.users_dao import delete_model_user, delete_user_verify_codes
+from app.models import User
+
+
+@click.group(name="test-data", help="Generate and destroy test data")
+def test_data_group():
+ pass
+
+
+class test_data_command:
+ def __init__(self, name=None):
+ self.name = name
+
+ def __call__(self, func):
+ # we need to call the flask with_appcontext decorator to ensure the config is loaded, db connected etc etc.
+ # we also need to use functools.wraps to carry through the names and docstrings etc of the functions.
+ # Then we need to turn it into a click.Command - that's what command_group.add_command expects.
+ @click.command(name=self.name)
+ @functools.wraps(func)
+ @flask_cli.with_appcontext
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+
+ test_data_group.add_command(wrapper)
+
+ return wrapper
+
+
+def setup_test_data_commands(application):
+ application.cli.add_command(test_data_group)
+
+
+@test_data_command()
+@click.option(
+ "-u",
+ "--user_email_prefix",
+ required=True,
+ help="""
+ Functional test user email prefix. eg "notify-test-preview"
+""",
+) # noqa
+def purge_functional_test_data(user_email_prefix):
+ """
+ Remove non-seeded functional test data
+
+ users, services, etc. Give an email prefix. Probably "notify-test-preview".
+ """
+ users = User.query.filter(User.email_address.like("{}%".format(user_email_prefix))).all()
+ for usr in users:
+ # Make sure the full email includes a uuid in it
+ # Just in case someone decides to use a similar email address.
+ try:
+ uuid.UUID(usr.email_address.split("@")[0].split("+")[1])
+ except ValueError:
+ print("Skipping {} as the user email doesn't contain a UUID.".format(usr.email_address))
+ else:
+ services = dao_fetch_all_services_by_user(usr.id)
+ if services:
+ for service in services:
+ delete_service_and_all_associated_db_objects(service)
+ else:
+ delete_user_verify_codes(usr)
+ delete_model_user(usr)
diff --git a/app/config.py b/app/config.py
index 341fba9d75..b170e77add 100644
--- a/app/config.py
+++ b/app/config.py
@@ -310,6 +310,9 @@ class Config(object):
DAILY_EMAIL_LIMIT_UPDATED_TEMPLATE_ID = "97dade64-ea8d-460f-8a34-900b74ee5eb0"
DAILY_LIMIT_UPDATED_TEMPLATE_ID = "b3c766e6-be32-4edf-b8db-0f04ef404edc"
DAILY_SMS_LIMIT_UPDATED_TEMPLATE_ID = "6ec12dd0-680a-4073-8d58-91d17cc8442f"
+ DEFAULT_TEMPLATE_CATEGORY_LOW = "0dda24c2-982a-4f44-9749-0e38b2607e89"
+ DEFAULT_TEMPLATE_CATEGORY_MEDIUM = "f75d6706-21b7-437e-b93a-2c0ab771e28e"
+ DEFAULT_TEMPLATE_CATEGORY_HIGH = "c4f87d7c-a55b-4c0f-91fe-e56c65bb1871"
EMAIL_2FA_TEMPLATE_ID = "299726d2-dba6-42b8-8209-30e1d66ea164"
EMAIL_MAGIC_LINK_TEMPLATE_ID = "6e97fd09-6da0-4cc8-829d-33cf5b818103"
FORCED_PASSWORD_RESET_TEMPLATE_ID = "e9a65a6b-497b-42f2-8f43-1736e43e13b3"
@@ -319,6 +322,7 @@ class Config(object):
HEARTBEAT_TEMPLATE_SMS_HIGH = "4969a9e9-ddfd-476e-8b93-6231e6f1be4a"
HEARTBEAT_TEMPLATE_SMS_LOW = "ab3a603b-d602-46ea-8c83-e05cb280b950"
HEARTBEAT_TEMPLATE_SMS_MEDIUM = "a48b54ce-40f6-4e4a-abe8-1e2fa389455b"
INVITATION_EMAIL_TEMPLATE_ID = "4f46df42-f795-4cc4-83bb-65ca312f49cc"
MOU_NOTIFY_TEAM_ALERT_TEMPLATE_ID = "d0e66c4c-0c50-43f0-94f5-f85b613202d4"
MOU_SIGNED_ON_BEHALF_ON_BEHALF_RECEIPT_TEMPLATE_ID = "522b6657-5ca5-4368-a294-6b527703bd0b"
@@ -560,6 +564,7 @@ class Config(object):
FF_CELERY_CUSTOM_TASK_PARAMS = env.bool("FF_CELERY_CUSTOM_TASK_PARAMS", True)
FF_CLOUDWATCH_METRICS_ENABLED = env.bool("FF_CLOUDWATCH_METRICS_ENABLED", False)
FF_SALESFORCE_CONTACT = env.bool("FF_SALESFORCE_CONTACT", False)
+ FF_TEMPLATE_CATEGORY = env.bool("FF_TEMPLATE_CATEGORY", False)
# SRE Tools auth keys
SRE_USER_NAME = "SRE_CLIENT_USER"
diff --git a/app/dao/template_categories_dao.py b/app/dao/template_categories_dao.py
new file mode 100644
index 0000000000..ea584df8ba
--- /dev/null
+++ b/app/dao/template_categories_dao.py
@@ -0,0 +1,89 @@
+import uuid
+from datetime import datetime
+
+from flask import current_app
+
+from app import db
+from app.dao.dao_utils import transactional
+from app.errors import InvalidRequest
+from app.models import Template, TemplateCategory
+
+
+@transactional
+def dao_create_template_category(template_category: TemplateCategory):
+ if template_category.id is None:
+ template_category.id = uuid.uuid4()
+ db.session.add(template_category)
+
+
+def dao_get_template_category_by_id(template_category_id) -> TemplateCategory:
+ return TemplateCategory.query.filter_by(id=template_category_id).one()
+
+
+def dao_get_template_category_by_template_id(template_id) -> TemplateCategory:
+ return Template.query.filter_by(id=template_id).one().template_category
+
+
+# TODO: Add filters: Select all template categories used by at least 1 sms/email template
+def dao_get_all_template_categories(template_type=None, hidden=None):
+ query = TemplateCategory.query
+
+ if template_type is not None:
+ query = query.join(Template).filter(Template.template_type == template_type)
+
+ if hidden is not None:
+ query = query.filter(TemplateCategory.hidden == hidden)
+
+ return query.all()
+
+
+@transactional
+def dao_update_template_category(template_category: TemplateCategory):
+ db.session.add(template_category)
+ db.session.commit()
+
+
+@transactional
+def dao_delete_template_category_by_id(template_category_id, cascade=False):
+ """
+ Deletes a `TemplateCategory`. By default, if the `TemplateCategory` is associated with any `Template`, it will not be deleted.
+ If the `cascade` option is specified then the category will be forcibly removed:
+ 1. The `Category` will be dissociated from templates that use it
+ 2. The `Template` is assigned to one of the default categories that matches the priority of the deleted category
+ 3. Finally the `Category` will be deleted
+
+ Args:
+ template_category_id (str): The id of the template_category to delete
+ cascade (bool, optional): Specify whether to dissociate the category from templates that use it to force removal. Defaults to False.
+ """
+ template_category = dao_get_template_category_by_id(template_category_id)
+ templates = Template.query.filter_by(template_category_id=template_category_id).all()
+
+ if templates and not cascade:
+ raise InvalidRequest(
+ "Cannot delete categories associated with templates. Dissociate the category from templates first.", 400
+ )
+
+ if templates and cascade:
+ # When there are templates and we are cascading, we set the category to a default
+ # that matches the template's previous category's priority
+ for template in templates:
+ # Get the default category that matches the previous priority of the template, based on template type
+ default_category_id = _get_default_category_id(
+ template_category.sms_process_type if template.template_type == "sms" else template_category.email_process_type
+ )
+ template.template_category_id = default_category_id
+ template.updated_at = datetime.utcnow()
+ db.session.add(template)
+ db.session.commit()
+
+ db.session.delete(template_category)
+
+
+def _get_default_category_id(process_type):
+ default_categories = {
+ "bulk": current_app.config["DEFAULT_TEMPLATE_CATEGORY_LOW"],
+ "normal": current_app.config["DEFAULT_TEMPLATE_CATEGORY_MEDIUM"],
+ "priority": current_app.config["DEFAULT_TEMPLATE_CATEGORY_HIGH"],
+ }
+ return default_categories.get(process_type, current_app.config["DEFAULT_TEMPLATE_CATEGORY_LOW"])
diff --git a/app/dao/templates_dao.py b/app/dao/templates_dao.py
index 822296cae4..8f06080986 100644
--- a/app/dao/templates_dao.py
+++ b/app/dao/templates_dao.py
@@ -78,6 +78,71 @@ def dao_update_template_reply_to(template_id, reply_to):
return template
+@transactional
+def dao_update_template_process_type(template_id, process_type):
+ Template.query.filter_by(id=template_id).update(
+ {
+ "process_type": process_type,
+ }
+ )
+ template = Template.query.filter_by(id=template_id).one()
+
+ history = TemplateHistory(
+ **{
+ "id": template.id,
+ "name": template.name,
+ "template_type": template.template_type,
+ "created_at": template.created_at,
+ "updated_at": template.updated_at,
+ "content": template.content,
+ "service_id": template.service_id,
+ "subject": template.subject,
+ "postage": template.postage,
+ "created_by_id": template.created_by_id,
+ "version": template.version,
+ "archived": template.archived,
+ "process_type": template.process_type,
+ "service_letter_contact_id": template.service_letter_contact_id,
+ }
+ )
+ db.session.add(history)
+ return template
+
+
+@transactional
+def dao_update_template_category(template_id, category_id):
+ Template.query.filter_by(id=template_id).update(
+ {
+ "template_category_id": category_id,
+ "updated_at": datetime.utcnow(),
+ "version": Template.version + 1,
+ }
+ )
+
+ template = Template.query.filter_by(id=template_id).one()
+
+ history = TemplateHistory(
+ **{
+ "id": template.id,
+ "name": template.name,
+ "template_type": template.template_type,
+ "created_at": template.created_at,
+ "updated_at": template.updated_at,
+ "content": template.content,
+ "service_id": template.service_id,
+ "subject": template.subject,
+ "postage": template.postage,
+ "created_by_id": template.created_by_id,
+ "version": template.version,
+ "archived": template.archived,
+ "process_type": template.process_type,
+ "service_letter_contact_id": template.service_letter_contact_id,
+ }
+ )
+ db.session.add(history)
+ return template
+
+
@transactional
def dao_redact_template(template, user_id):
template.template_redacted.redact_personalisation = True
diff --git a/app/delivery/send_to_providers.py b/app/delivery/send_to_providers.py
index 5ef24c0769..5bcaed1f34 100644
--- a/app/delivery/send_to_providers.py
+++ b/app/delivery/send_to_providers.py
@@ -22,12 +22,14 @@
from app import bounce_rate_client, clients, document_download_client, statsd_client
from app.celery.research_mode_tasks import send_email_response, send_sms_response
+from app.clients.sms import SmsSendingVehicles
from app.config import Config
from app.dao.notifications_dao import dao_update_notification
from app.dao.provider_details_dao import (
dao_toggle_sms_provider,
get_provider_details_by_notification_type,
)
+from app.dao.template_categories_dao import dao_get_template_category_by_id
from app.dao.templates_dao import dao_get_template_by_id
from app.exceptions import (
DocumentDownloadException,
@@ -43,6 +45,7 @@
EMAIL_TYPE,
KEY_TYPE_TEST,
NOTIFICATION_CONTAINS_PII,
+ NOTIFICATION_PERMANENT_FAILURE,
NOTIFICATION_SENDING,
NOTIFICATION_SENT,
NOTIFICATION_TECHNICAL_FAILURE,
@@ -103,12 +106,21 @@ def send_sms_to_provider(notification):
else:
try:
+ template_category_id = template_dict.get("template_category_id")
+ if current_app.config["FF_TEMPLATE_CATEGORY"] and template_category_id is not None:
+ sending_vehicle = SmsSendingVehicles(
+ dao_get_template_category_by_id(template_category_id).sms_sending_vehicle
+ )
+ else:
+ sending_vehicle = None
reference = provider.send_sms(
to=validate_and_format_phone_number(notification.to, international=notification.international),
content=str(template),
reference=str(notification.id),
sender=notification.reply_to_text,
template_id=notification.template_id,
+ service_id=notification.service_id,
+ sending_vehicle=sending_vehicle,
)
except Exception as e:
notification.billable_units = template.fragment_count
@@ -118,7 +130,10 @@ def send_sms_to_provider(notification):
else:
notification.reference = reference
notification.billable_units = template.fragment_count
- update_notification_to_sending(notification, provider)
+ if reference == "opted_out":
+ update_notification_to_opted_out(notification, provider)
+ else:
+ update_notification_to_sending(notification, provider)
# Record StatsD stats to compute SLOs
statsd_client.timing_with_dates("sms.total-time", notification.sent_at, notification.created_at)
@@ -340,6 +355,14 @@ def update_notification_to_sending(notification, provider):
dao_update_notification(notification)
+def update_notification_to_opted_out(notification, provider):
+ notification.sent_at = datetime.utcnow()
+ notification.sent_by = provider.get_name()
+ notification.status = NOTIFICATION_PERMANENT_FAILURE
+ notification.provider_response = "Phone number is opted out"
+ dao_update_notification(notification)
+
+
def provider_to_use(
notification_type: str,
notification_id: UUID,
@@ -350,13 +373,13 @@ def provider_to_use(
) -> Any:
"""
Get the provider to use for sending the notification.
- SMS that are being sent with a dedicated number or to a US number should not use Pinpoint.
+ SMS that are being sent with a dedicated number or internationally should not use Pinpoint.
Args:
notification_type (str): SMS or EMAIL.
notification_id (UUID): id of notification. Just used for logging.
to (str, optional): recipient. Defaults to None.
- international (bool, optional): Recipient is international. Defaults to False.
+ international (bool, optional): Flags whether or not the message recipient is outside Zone 1 (US / Canada / Caribbean). Defaults to False.
sender (str, optional): reply_to_text to use. Defaults to None.
template_id (str, optional): template_id to use. Defaults to None.
@@ -368,17 +391,26 @@ def provider_to_use(
"""
has_dedicated_number = sender is not None and sender.startswith("+1")
+ cannot_determine_recipient_country = False
+ recipient_outside_canada = False
sending_to_us_number = False
if to is not None:
match = next(iter(phonenumbers.PhoneNumberMatcher(to, "US")), None)
- if match and phonenumbers.region_code_for_number(match.number) == "US":
+ if match is None:
+ cannot_determine_recipient_country = True
+ elif (
+ phonenumbers.region_code_for_number(match.number) == "US"
+ ): # The US is a special case that needs to send from a US toll free number
sending_to_us_number = True
-
+ elif phonenumbers.region_code_for_number(match.number) != "CA":
+ recipient_outside_canada = True
using_sc_pool_template = template_id is not None and str(template_id) in current_app.config["AWS_PINPOINT_SC_TEMPLATE_IDS"]
-
+ zone_1_outside_canada = recipient_outside_canada and not international
do_not_use_pinpoint = (
has_dedicated_number
or sending_to_us_number
+ or cannot_determine_recipient_country
+ or zone_1_outside_canada
or not current_app.config["AWS_PINPOINT_SC_POOL_ID"]
or ((not current_app.config["AWS_PINPOINT_DEFAULT_POOL_ID"]) and not using_sc_pool_template)
)
diff --git a/app/encryption.py b/app/encryption.py
index 3e9de77f55..2622fcd938 100644
--- a/app/encryption.py
+++ b/app/encryption.py
@@ -61,7 +61,7 @@ def sign_with_all_keys(self, to_sign: str | NotificationDictToSign) -> List[str
Returns:
List[str | bytes]: A list of signed values.
"""
- signed = []
+ signed: list[str | bytes] = []
for k in reversed(self.secret_key): # reversed so that the default key is last
signed.append(URLSafeSerializer(k).dumps(to_sign, salt=self.salt))
return signed
diff --git a/app/models.py b/app/models.py
index f79867918e..74000e2b09 100644
--- a/app/models.py
+++ b/app/models.py
@@ -38,6 +38,7 @@
signer_inbound_sms,
signer_personalisation,
)
+from app.clients.sms import SmsSendingVehicles
from app.encryption import check_hash, hashpw
from app.history_meta import Versioned
@@ -65,6 +66,8 @@
COMPLAINT_CALLBACK_TYPE = "complaint"
SERVICE_CALLBACK_TYPES = [DELIVERY_STATUS_CALLBACK_TYPE, COMPLAINT_CALLBACK_TYPE]
+sms_sending_vehicles = db.Enum(*[vehicle.value for vehicle in SmsSendingVehicles], name="sms_sending_vehicles")
+
def filter_null_value_fields(obj):
return dict(filter(lambda x: x[1] is not None, obj.items()))
@@ -1033,6 +1036,42 @@ def get_users_with_permission(self):
PRECOMPILED_TEMPLATE_NAME = "Pre-compiled PDF"
+class TemplateCategory(BaseModel):
+ __tablename__ = "template_categories"
+
+ id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+ name_en = db.Column(db.String(255), unique=True, nullable=False)
+ name_fr = db.Column(db.String(255), unique=True, nullable=False)
+ description_en = db.Column(db.String(200), nullable=True)
+ description_fr = db.Column(db.String(200), nullable=True)
+ sms_process_type = db.Column(db.String(200), nullable=False)
+ email_process_type = db.Column(db.String(200), nullable=False)
+ hidden = db.Column(db.Boolean, nullable=False, default=False)
+ created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated_at = db.Column(db.DateTime, onupdate=datetime.datetime.utcnow)
+ sms_sending_vehicle = db.Column(sms_sending_vehicles, nullable=False, default="long_code")
+
+ def serialize(self):
+ return {
+ "id": self.id,
+ "name_en": self.name_en,
+ "name_fr": self.name_fr,
+ "description_en": self.description_en,
+ "description_fr": self.description_fr,
+ "sms_process_type": self.sms_process_type,
+ "email_process_type": self.email_process_type,
+ "hidden": self.hidden,
+ "created_at": self.created_at,
+ "updated_at": self.updated_at,
+ "sms_sending_vehicle": self.sms_sending_vehicle,
+ }
+
+ @classmethod
+ def from_json(cls, data):
+ fields = data.copy()
+ return cls(**fields)
+
+
class TemplateBase(BaseModel):
__abstract__ = True
@@ -1078,6 +1117,14 @@ def service_id(cls):
def created_by_id(cls):
return db.Column(UUID(as_uuid=True), db.ForeignKey("users.id"), index=True, nullable=False)
+ @declared_attr
+ def template_category_id(cls):
+ return db.Column(UUID(as_uuid=True), db.ForeignKey("template_categories.id"), index=True, nullable=True)
+
+ @declared_attr
+ def template_category(cls):
+ return db.relationship("TemplateCategory", primaryjoin="Template.template_category_id == TemplateCategory.id")
+
@declared_attr
def created_by(cls):
return db.relationship("User")
@@ -1088,7 +1135,7 @@ def process_type(cls):
db.String(255),
db.ForeignKey("template_process_type.name"),
index=True,
- nullable=False,
+ nullable=True,
default=NORMAL,
)
@@ -1198,6 +1245,17 @@ def get_link(self):
_external=True,
)
+ @property
+ def template_process_type(self):
+ """By default we use the process_type from TemplateCategory, but allow admins to override it on a per-template basis.
+    Only when overridden do we use the process_type from the template itself.
+ """
+ if self.template_type == SMS_TYPE:
+            return self.process_type if self.process_type else self.template_category.sms_process_type
+ elif self.template_type == EMAIL_TYPE:
+            return self.process_type if self.process_type else self.template_category.email_process_type
+ return self.process_type
+
@classmethod
def from_json(cls, data, folder=None):
"""
@@ -1258,6 +1316,10 @@ def from_json(cls, data):
fields.pop("folder", None)
return super(TemplateHistory, cls).from_json(fields)
+ @declared_attr
+ def template_category(cls):
+ return db.relationship("TemplateCategory", primaryjoin="TemplateHistory.template_category_id == TemplateCategory.id")
+
@declared_attr
def template_redacted(cls):
return db.relationship(
diff --git a/app/schemas.py b/app/schemas.py
index 75a6a2e78f..9beae6ab9f 100644
--- a/app/schemas.py
+++ b/app/schemas.py
@@ -107,6 +107,31 @@ def make_instance(self, data, **kwargs):
return super(BaseSchema, self).make_instance(data)
+class TemplateCategorySchema(BaseSchema):
+ class Meta(BaseSchema.Meta):
+ model = models.TemplateCategory
+
+ @validates("name_en")
+ def validate_name_en(self, value):
+ if not value:
+ raise ValidationError("Invalid name")
+
+ @validates("name_fr")
+ def validate_name_fr(self, value):
+ if not value:
+ raise ValidationError("Invalid name")
+
+ @validates("sms_process_type")
+ def validate_sms_process_type(self, value):
+ if value not in models.TEMPLATE_PROCESS_TYPE:
+ raise ValidationError("Invalid SMS process type")
+
+ @validates("email_process_type")
+ def validate_email_process_type(self, value):
+ if value not in models.TEMPLATE_PROCESS_TYPE:
+ raise ValidationError("Invalid email process type")
+
+
class UserSchema(BaseSchema):
permissions = fields.Method("user_permissions", dump_only=True)
password_changed_at = field_for(models.User, "password_changed_at", format="%Y-%m-%d %H:%M:%S.%f")
@@ -371,6 +396,8 @@ class TemplateSchema(BaseTemplateSchema):
created_by = field_for(models.Template, "created_by", required=True)
is_precompiled_letter = fields.Method("get_is_precompiled_letter")
process_type = field_for(models.Template, "process_type")
+ template_category = fields.Nested(TemplateCategorySchema, dump_only=True)
+ template_category_id = fields.UUID(required=False, allow_none=True)
redact_personalisation = fields.Method("redact")
created_at = FlexibleDateTime()
updated_at = FlexibleDateTime()
@@ -389,10 +416,17 @@ def validate_type(self, data, **kwargs):
raise ValidationError("Invalid template subject", "subject")
+class ReducedTemplateSchema(TemplateSchema):
+ class Meta(BaseSchema.Meta):
+ model = models.Template
+ exclude = ["content", "jobs", "service_id", "service_letter_contact_id"]
+
+
class TemplateHistorySchema(BaseSchema):
reply_to = fields.Method("get_reply_to", allow_none=True)
reply_to_text = fields.Method("get_reply_to_text", allow_none=True)
process_type = field_for(models.Template, "process_type")
+ template_category = fields.Nested(TemplateCategorySchema, dump_only=True)
created_by = fields.Nested(UserSchema, only=["id", "name", "email_address"], dump_only=True)
created_at = field_for(models.Template, "created_at", format="%Y-%m-%d %H:%M:%S.%f")
updated_at = FlexibleDateTime()
@@ -805,6 +839,8 @@ def validate_archived(self, data, **kwargs):
service_history_schema = ServiceHistorySchema()
api_key_history_schema = ApiKeyHistorySchema()
template_history_schema = TemplateHistorySchema()
+template_category_schema = TemplateCategorySchema()
+reduced_template_schema = ReducedTemplateSchema()
event_schema = EventSchema()
provider_details_schema = ProviderDetailsSchema()
provider_details_history_schema = ProviderDetailsHistorySchema()
diff --git a/app/template/rest.py b/app/template/rest.py
index 46e5348dbf..789ffbefa3 100644
--- a/app/template/rest.py
+++ b/app/template/rest.py
@@ -25,6 +25,8 @@
dao_get_template_versions,
dao_redact_template,
dao_update_template,
+ dao_update_template_category,
+ dao_update_template_process_type,
dao_update_template_reply_to,
get_precompiled_letter_template,
)
@@ -40,7 +42,11 @@
)
from app.notifications.validators import check_reply_to, service_has_permission
from app.schema_validation import validate
-from app.schemas import template_history_schema, template_schema
+from app.schemas import (
+ reduced_template_schema,
+ template_history_schema,
+ template_schema,
+)
from app.template.template_schemas import post_create_template_schema
from app.utils import get_public_notify_type_text, get_template_instance
@@ -132,6 +138,24 @@ def create_template(service_id):
return jsonify(data=template_schema.dump(new_template)), 201
+@template_blueprint.route("/<uuid:template_id>/category/<uuid:template_category_id>", methods=["POST"])
+def update_templates_category(service_id, template_id, template_category_id):
+ updated = dao_update_template_category(template_id, template_category_id)
+ return jsonify(data=template_schema.dump(updated)), 200
+
+
+@template_blueprint.route("/<uuid:template_id>/process-type", methods=["POST"])
+def update_template_process_type(template_id):
+ data = request.get_json()
+ if "process_type" not in data:
+ message = "Field is required"
+ errors = {"process_type": [message]}
+ raise InvalidRequest(errors, status_code=400)
+
+ updated = dao_update_template_process_type(template_id=template_id, process_type=data.get("process_type"))
+ return jsonify(data=template_schema.dump(updated)), 200
+
+
 @template_blueprint.route("/<uuid:template_id>", methods=["POST"])
def update_template(service_id, template_id):
fetched_template = dao_get_template_by_id_and_service_id(template_id=template_id, service_id=service_id)
@@ -186,6 +210,11 @@ def update_template(service_id, template_id):
)
raise InvalidRequest(errors, status_code=400)
+ # if the template category is changing, set the process_type to None to remove any priority override
+ if current_app.config["FF_TEMPLATE_CATEGORY"]:
+ if updated_template["template_category_id"] != str(fetched_template.template_category_id):
+ updated_template["process_type"] = None
+
update_dict = template_schema.load(updated_template)
if update_dict.archived:
update_dict.folder = None
@@ -205,7 +234,7 @@ def get_precompiled_template_for_service(service_id):
@template_blueprint.route("", methods=["GET"])
def get_all_templates_for_service(service_id):
templates = dao_get_all_templates_for_service(service_id=service_id)
- data = template_schema.dump(templates, many=True)
+ data = reduced_template_schema.dump(templates, many=True)
return jsonify(data=data)
@@ -253,7 +282,7 @@ def get_template_versions(service_id, template_id):
def _template_has_not_changed(current_data, updated_template):
return all(
current_data[key] == updated_template[key]
- for key in ("name", "content", "subject", "archived", "process_type", "postage")
+ for key in ("name", "content", "subject", "archived", "process_type", "postage", "template_category_id")
)
diff --git a/app/template/template_category_rest.py b/app/template/template_category_rest.py
new file mode 100644
index 0000000000..dd8ea98088
--- /dev/null
+++ b/app/template/template_category_rest.py
@@ -0,0 +1,97 @@
+from flask import Blueprint, jsonify, request
+
+from app.dao.template_categories_dao import (
+ dao_create_template_category,
+ dao_delete_template_category_by_id,
+ dao_get_all_template_categories,
+ dao_get_template_category_by_id,
+ dao_get_template_category_by_template_id,
+ dao_update_template_category,
+)
+from app.errors import register_errors
+from app.models import TemplateCategory
+from app.schemas import template_category_schema
+
+template_category_blueprint = Blueprint(
+ "template_category",
+ __name__,
+ url_prefix="/template-category",
+)
+
+register_errors(template_category_blueprint)
+
+
+@template_category_blueprint.route("", methods=["POST"])
+def create_template_category():
+ data = request.get_json()
+
+ template_category_schema.load(data)
+ template_category = TemplateCategory.from_json(data)
+
+ dao_create_template_category(template_category)
+
+ return jsonify(template_category=template_category_schema.dump(template_category)), 201
+
+
+@template_category_blueprint.route("/<template_category_id>", methods=["GET"])
+def get_template_category(template_category_id):
+ template_category = dao_get_template_category_by_id(template_category_id)
+ return jsonify(template_category=template_category_schema.dump(template_category)), 200
+
+
+@template_category_blueprint.route("/by-template-id/<template_id>", methods=["GET"])
+def get_template_category_by_template_id(template_id):
+ template_category = dao_get_template_category_by_template_id(template_id)
+ return jsonify(template_category=template_category_schema.dump(template_category)), 200
+
+
+@template_category_blueprint.route("", methods=["GET"])
+def get_template_categories():
+ template_type = request.args.get("template_type", None)
+
+ hidden = request.args.get("hidden")
+ if hidden is not None:
+ if hidden == "True":
+ hidden = True
+ elif hidden == "False":
+ hidden = False
+ else:
+ hidden = None
+
+ # Validate request args
+ if template_type is not None:
+ if template_type not in ["sms", "email"]:
+ return jsonify(message="Invalid filter 'template_type', valid template_types: 'sms', 'email'"), 400
+
+ template_categories = template_category_schema.dump(dao_get_all_template_categories(template_type, hidden), many=True)
+ return jsonify(template_categories=template_categories), 200
+
+
+@template_category_blueprint.route("/<template_category_id>", methods=["POST"])
+def update_template_category(template_category_id):
+ current_category = dict(template_category_schema.dump(dao_get_template_category_by_id(template_category_id)))
+ current_category.update(request.get_json())
+
+ updated_category = template_category_schema.load(current_category)
+ dao_update_template_category(updated_category)
+
+ return jsonify(template_category=template_category_schema.dump(updated_category)), 200
+
+
+@template_category_blueprint.route("/<template_category_id>", methods=["DELETE"])
+def delete_template_category(template_category_id):
+ """Deletes a template category. By default, if the template category is associated with any template, it will not be deleted.
+    This can be overridden by specifying the `cascade` query parameter.
+
+ Args:
+ template_category_id (str): The id of the template_category to delete
+
+ Request Args:
+ cascade (bool, optional): Specify whether to dissociate the category from templates that use it to force removal. Defaults to False.
+
+ Returns:
+ (flask.Response): The response message and http status code.
+ """
+ cascade = True if request.args.get("cascade") == "True" else False
+ dao_delete_template_category_by_id(template_category_id, cascade=cascade)
+ return "", 204
diff --git a/app/user/contact_request.py b/app/user/contact_request.py
index cfca30cafb..3473db150a 100644
--- a/app/user/contact_request.py
+++ b/app/user/contact_request.py
@@ -34,6 +34,9 @@ class ContactRequest:
branding_logo_name: str = field(default="")
alt_text_en: str = field(default="")
alt_text_fr: str = field(default="")
+ template_category_name_en: str = field(default="")
+ template_category_name_fr: str = field(default="")
+ template_id_link: str = field(default="")
def __post_init__(self):
# email address is mandatory for us
@@ -56,3 +59,6 @@ def is_go_live_request(self):
def is_branding_request(self):
return "branding_request" in self.support_type.lower()
+
+ def is_new_template_category_request(self):
+ return "new_template_category_request" in self.support_type.lower()
diff --git a/app/user/rest.py b/app/user/rest.py
index ea28646d41..1d1918a71c 100644
--- a/app/user/rest.py
+++ b/app/user/rest.py
@@ -518,6 +518,36 @@ def send_branding_request(user_id):
return jsonify({"status_code": status_code}), 204
+@user_blueprint.route("/<uuid:user_id>/new-template-category-request", methods=["POST"])
+def send_new_template_category_request(user_id):
+ contact = None
+ data = request.json
+ try:
+ user = get_user_by_id(user_id=user_id)
+ contact = ContactRequest(
+ support_type="new_template_category_request",
+ friendly_support_type="New template category request",
+ name=user.name,
+ email_address=user.email_address,
+ service_id=data["service_id"],
+ template_category_name_en=data["template_category_name_en"],
+ template_category_name_fr=data["template_category_name_fr"],
+ template_id_link=f"https://{current_app.config['ADMIN_BASE_URL']}/services/{data['service_id']}/templates/{data['template_id']}",
+ )
+ contact.tags = ["z_skip_opsgenie", "z_skip_urgent_escalation"]
+
+ except TypeError as e:
+ current_app.logger.error(e)
+ return jsonify({}), 400
+ except NoResultFound as e:
+ # This means that get_user_by_id couldn't find a user
+ current_app.logger.error(e)
+ return jsonify({}), 400
+
+ status_code = Freshdesk(contact).send_ticket()
+ return jsonify({"status_code": status_code}), 204
+
+
 @user_blueprint.route("/<uuid:user_id>", methods=["GET"])
@user_blueprint.route("", methods=["GET"])
def get_user(user_id=None):
diff --git a/application.py b/application.py
index 3e876fbe5f..12cea1703c 100644
--- a/application.py
+++ b/application.py
@@ -5,6 +5,8 @@
import newrelic.agent # See https://bit.ly/2xBVKBH
from apig_wsgi import make_lambda_handler
+from aws_xray_sdk.core import xray_recorder
+from aws_xray_sdk.ext.flask.middleware import XRayMiddleware
from dotenv import load_dotenv
from flask import Flask
from werkzeug.middleware.proxy_fix import ProxyFix
@@ -13,11 +15,14 @@
load_dotenv()
-
application = Flask("api")
application.wsgi_app = ProxyFix(application.wsgi_app) # type: ignore
+
app = create_app(application)
+xray_recorder.configure(service='api')
+XRayMiddleware(app, xray_recorder)
+
apig_wsgi_handler = make_lambda_handler(app, binary_support=True)
if os.environ.get("USE_LOCAL_JINJA_TEMPLATES") == "True":
diff --git a/migrations/versions/0453_set_supports_international.py b/migrations/versions/0453_set_supports_international.py
new file mode 100644
index 0000000000..008126b815
--- /dev/null
+++ b/migrations/versions/0453_set_supports_international.py
@@ -0,0 +1,25 @@
+"""
+
+Revision ID: 0453_set_supports_international
+Revises: 0452_set_pgaudit_config
+Create Date: 2024-06-20 14:36:03.038934
+
+"""
+from alembic import op
+
+revision = "0453_set_supports_international"
+down_revision = "0452_set_pgaudit_config"
+
+
+def upgrade():
+ op.execute("UPDATE provider_details SET supports_international=True WHERE identifier='sns'")
+ op.execute("UPDATE provider_details SET supports_international=True WHERE identifier='pinpoint'")
+ op.execute("UPDATE provider_details_history SET supports_international=True WHERE identifier='sns'")
+ op.execute("UPDATE provider_details_history SET supports_international=True WHERE identifier='pinpoint'")
+
+
+def downgrade():
+ op.execute("UPDATE provider_details SET supports_international=False WHERE identifier='sns'")
+ op.execute("UPDATE provider_details SET supports_international=False WHERE identifier='pinpoint'")
+ op.execute("UPDATE provider_details_history SET supports_international=False WHERE identifier='sns'")
+ op.execute("UPDATE provider_details_history SET supports_international=False WHERE identifier='pinpoint'")
diff --git a/migrations/versions/0454_add_template_category.py b/migrations/versions/0454_add_template_category.py
new file mode 100644
index 0000000000..b98339fb9f
--- /dev/null
+++ b/migrations/versions/0454_add_template_category.py
@@ -0,0 +1,85 @@
+"""
+
+Revision ID: 0454_add_template_category
+Revises: 0453_set_supports_international
+Create Date: 2024-06-11 13:32:00
+"""
+
+from datetime import datetime
+
+import sqlalchemy as sa
+from alembic import op
+from flask import current_app
+from sqlalchemy.dialects import postgresql
+
+revision = "0454_add_template_category"
+down_revision = "0453_set_supports_international"
+
+
+def upgrade():
+ op.create_table(
+ "template_categories",
+ sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True, nullable=False),
+ sa.Column("name_en", sa.String(length=255), nullable=False),
+ sa.Column("name_fr", sa.String(length=255), nullable=False),
+ sa.Column("description_en", sa.String(length=255), nullable=True),
+ sa.Column("description_fr", sa.String(length=255), nullable=True),
+ sa.Column("sms_process_type", sa.String(length=255), nullable=False),
+ sa.Column("email_process_type", sa.String(length=255), nullable=False),
+ sa.Column("hidden", sa.Boolean(), nullable=False),
+ sa.Column("created_at", sa.DateTime(), server_default=sa.func.now(), nullable=False),
+ sa.Column("updated_at", sa.DateTime(), server_default=sa.func.now(), nullable=True),
+ sa.UniqueConstraint("name_en"),
+ sa.UniqueConstraint("name_fr"),
+ )
+
+ # Insert the generic low, medium, and high categories
+ op.execute(
+ "INSERT INTO template_categories (id, name_en, name_fr, sms_process_type, email_process_type, hidden, created_at) VALUES ('{}', 'Low Category (Bulk)', 'Catégorie Basse (En Vrac)', 'low', 'low', true, now())".format(
+ current_app.config["DEFAULT_TEMPLATE_CATEGORY_LOW"],
+ )
+ )
+ op.execute(
+ "INSERT INTO template_categories (id, name_en, name_fr, sms_process_type, email_process_type, hidden, created_at) VALUES ('{}', 'Medium Category (Normal)', 'Catégorie Moyenne (Normale)', 'low', 'low', true, now())".format(
+ current_app.config["DEFAULT_TEMPLATE_CATEGORY_MEDIUM"]
+ )
+ )
+ op.execute(
+ "INSERT INTO template_categories (id, name_en, name_fr, sms_process_type, email_process_type, hidden, created_at) VALUES ('{}', 'High Category (Priority)', 'Catégorie Haute (Priorité)', 'low', 'low', true, now())".format(
+ current_app.config["DEFAULT_TEMPLATE_CATEGORY_HIGH"]
+ )
+ )
+
+ op.add_column("templates", sa.Column("template_category_id", postgresql.UUID(as_uuid=True), nullable=True))
+ op.add_column("templates_history", sa.Column("template_category_id", postgresql.UUID(as_uuid=True), nullable=True))
+ op.create_index(
+ op.f("ix_template_category_id"),
+ "templates",
+ ["template_category_id"],
+ unique=False,
+ )
+ op.create_index(
+ op.f("ix_template_categories_name_en"),
+ "template_categories",
+ ["name_en"],
+ unique=False,
+ )
+ op.create_index(
+ op.f("ix_template_categories_name_fr"),
+ "template_categories",
+ ["name_fr"],
+ unique=False,
+ )
+ op.alter_column("templates", "process_type", nullable=True)
+ op.create_foreign_key("fk_template_template_categories", "templates", "template_categories", ["template_category_id"], ["id"])
+
+
+def downgrade():
+ op.drop_constraint("fk_template_template_categories", "templates", type_="foreignkey")
+ op.drop_index(op.f("ix_template_category_id"), table_name="templates")
+ op.drop_index(op.f("ix_template_categories_name_en"), table_name="template_categories")
+ op.drop_index(op.f("ix_template_categories_name_fr"), table_name="template_categories")
+ op.alter_column("templates", "process_type", nullable=False)
+ op.drop_column("templates", "template_category_id")
+ op.drop_column("templates_history", "template_category_id")
+ op.drop_table("template_categories")
diff --git a/migrations/versions/0455_add_starter_category.py b/migrations/versions/0455_add_starter_category.py
new file mode 100644
index 0000000000..efe74bf2d0
--- /dev/null
+++ b/migrations/versions/0455_add_starter_category.py
@@ -0,0 +1,100 @@
+"""
+
+Revision ID: 0455_add_starter_category
+Revises: 0454_add_template_category
+Create Date: 2024-06-11 13:32:00
+"""
+from alembic import op
+
+revision = "0455_add_starter_category"
+down_revision = "0454_add_template_category"
+
+CAT_ALERT_ID = "1d8ce435-a7e5-431b-aaa2-a418bc4d14f9"
+CAT_AUTH_ID = "b6c42a7e-2a26-4a07-802b-123a5c3198a9"
+CAT_AUTO_ID = "977e2a00-f957-4ff0-92f2-ca3286b24786"
+CAT_DECISION_ID = "e81678c0-4897-4111-b9d0-172f6b595f89"
+CAT_INFO_ID = "207b293c-2ae5-48e8-836d-fcabd60b2153"
+CAT_REMINDER_ID = "edb966f3-4a4c-47a4-96ab-05ff259b919c"
+CAT_REQUEST_ID = "e0b8fbe5-f435-4977-8fc8-03f13d9296a5"
+CAT_STATUS_ID = "55eb1137-6dc6-4094-9031-f61124a279dc"
+CAT_TEST_ID = "7c16aa95-e2e1-4497-81d6-04c656520fe4"
+
+# List of category IDs
+category_ids = [
+ CAT_ALERT_ID,
+ CAT_AUTH_ID,
+ CAT_AUTO_ID,
+ CAT_DECISION_ID,
+ CAT_INFO_ID,
+ CAT_REMINDER_ID,
+ CAT_REQUEST_ID,
+ CAT_STATUS_ID,
+ CAT_TEST_ID,
+]
+
+# Corresponding English and French names and descriptions and process_type
+category_data = [
+ ("Alert", "Alerte", "System checks and monitoring", "Contrôles et suivi du système", "medium", "medium"),
+ (
+ "Authentication",
+ "Authentification",
+ "Password resets and two factor verification",
+ "Réinitialisations de mots de passe et vérification à deux facteurs",
+ "priority",
+ "priority",
+ ),
+ (
+ "Automatic reply",
+ "Réponse automatique",
+ "No-reply and confirmation messages",
+ "Messages auxquels il est impossible de répondre et messages de confirmation",
+ "priority",
+ "priority",
+ ),
+ ("Decision", "Décision", "Permits, documents and results", "Permis, documents et résultats", "low", "low"),
+ (
+ "Information blast",
+ "Information de masse",
+ "Newsletters, surveys and general information",
+ "Infolettres, sondages et renseignements généraux",
+ "bulk",
+ "bulk",
+ ),
+ ("Reminder", "Rappel", "Appointments and deadlines", "Rendez-vous et échéances", "normal", "normal"),
+ ("Request", "Demande", "Request: Follow up and next steps", "Suivis et prochaines étapes", "normal", "normal"),
+ ("Status update", "État d’avancement", "Changes and progress", "Changements et progrès", "normal", "normal"),
+ ("Test", "Test", "Practice messages", "Messages à titre d’entraînement", "bulk", "bulk"),
+]
+
+
+def upgrade():
+ # Insert new process_type
+ op.execute("INSERT INTO template_process_type (name) VALUES ('low')")
+ op.execute("INSERT INTO template_process_type (name) VALUES ('medium')")
+ op.execute("INSERT INTO template_process_type (name) VALUES ('high')")
+
+ def insert_statement(id, name_en, name_fr, description_en, description_fr, sms_process_type, email_process_type):
+ # Escape single quotes in string values
+ name_fr = name_fr.replace("'", "''")
+ description_fr = description_fr.replace("'", "''")
+
+ return f"""
+ INSERT INTO template_categories
+ (id, name_en, name_fr, description_en, description_fr, sms_process_type, email_process_type, hidden, created_at)
+ VALUES
+ ('{id}', '{name_en}', '{name_fr}', '{description_en}', '{description_fr}', '{sms_process_type}', '{email_process_type}', false, now())
+ """
+
+ for id, (name_en, name_fr, desc_en, desc_fr, sms_process_type, email_process_type) in zip(category_ids, category_data):
+ stmt = insert_statement(id, name_en, name_fr, desc_en, desc_fr, sms_process_type, email_process_type)
+ op.execute(stmt)
+
+
+def downgrade():
+ for id in category_ids:
+ op.execute(f"DELETE FROM template_categories WHERE id = '{id}'")
+
+ # Delete process_type
+ op.execute("DELETE FROM template_process_type WHERE name = 'low'")
+ op.execute("DELETE FROM template_process_type WHERE name = 'medium'")
+ op.execute("DELETE FROM template_process_type WHERE name = 'high'")
diff --git a/migrations/versions/0456_update_template_categories.py b/migrations/versions/0456_update_template_categories.py
new file mode 100644
index 0000000000..add52423ba
--- /dev/null
+++ b/migrations/versions/0456_update_template_categories.py
@@ -0,0 +1,73 @@
+"""
+Revision ID: 0456_update_template_categories
+Revises: 0455_add_starter_category
+Create Date: 2024-06-11 13:32:00
+"""
+import sqlalchemy as sa
+from alembic import op
+
+revision = "0456_update_template_categories"
+down_revision = "0455_add_starter_category"
+
+LOW_CATEGORY_ID = "0dda24c2-982a-4f44-9749-0e38b2607e89"
+MEDIUM_CATEGORY_ID = "f75d6706-21b7-437e-b93a-2c0ab771e28e"
+HIGH_CATEGORY_ID = "c4f87d7c-a55b-4c0f-91fe-e56c65bb1871"
+CAT_ALERT_ID = "1d8ce435-a7e5-431b-aaa2-a418bc4d14f9"
+CAT_AUTH_ID = "b6c42a7e-2a26-4a07-802b-123a5c3198a9"
+CAT_AUTO_ID = "977e2a00-f957-4ff0-92f2-ca3286b24786"
+CAT_DECISION_ID = "e81678c0-4897-4111-b9d0-172f6b595f89"
+CAT_INFO_ID = "207b293c-2ae5-48e8-836d-fcabd60b2153"
+CAT_REMINDER_ID = "edb966f3-4a4c-47a4-96ab-05ff259b919c"
+CAT_REQUEST_ID = "e0b8fbe5-f435-4977-8fc8-03f13d9296a5"
+CAT_STATUS_ID = "55eb1137-6dc6-4094-9031-f61124a279dc"
+CAT_TEST_ID = "7c16aa95-e2e1-4497-81d6-04c656520fe4"
+
+SHORT_CODE_CATS = (HIGH_CATEGORY_ID, CAT_AUTH_ID, CAT_AUTO_ID, CAT_DECISION_ID, CAT_REMINDER_ID, CAT_REQUEST_ID, CAT_STATUS_ID)
+LONG_CODE_CATS = (LOW_CATEGORY_ID, MEDIUM_CATEGORY_ID, CAT_ALERT_ID, CAT_INFO_ID, CAT_TEST_ID)
+
+sms_options = ("short_code", "long_code")
+sms_sending_vehicle = sa.Enum(*sms_options, name="sms_sending_vehicle")
+
+
+def upgrade():
+ sms_sending_vehicle.create(op.get_bind(), checkfirst=True)
+
+ op.add_column(
+ "template_categories", sa.Column("sms_sending_vehicle", sms_sending_vehicle, server_default="long_code", nullable=False)
+ )
+
+ # Update the generic categories
+ op.execute(
+ "UPDATE template_categories SET sms_process_type = 'bulk', email_process_type = 'bulk' WHERE id = '{}'".format(
+ LOW_CATEGORY_ID,
+ )
+ )
+ op.execute(
+ "UPDATE template_categories SET sms_process_type = 'normal', email_process_type = 'normal' WHERE id = '{}'".format(
+ MEDIUM_CATEGORY_ID,
+ )
+ )
+ op.execute(
+ "UPDATE template_categories SET sms_process_type = 'priority', email_process_type = 'priority' WHERE id = '{}'".format(
+ HIGH_CATEGORY_ID,
+ )
+ )
+
+ # Update the sms_sending_vehicle for the starter categories
+
+ op.execute(
+ "UPDATE template_categories SET sms_sending_vehicle = 'short_code' WHERE id in {}".format(
+ SHORT_CODE_CATS,
+ )
+ )
+
+ op.execute(
+ "UPDATE template_categories SET sms_sending_vehicle = 'long_code' WHERE id in {}".format(
+ LONG_CODE_CATS,
+ )
+ )
+
+
+def downgrade():
+ op.drop_column("template_categories", "sms_sending_vehicle")
+ sms_sending_vehicle.drop(op.get_bind(), checkfirst=True)
diff --git a/migrations/versions/0457_update_categories.py b/migrations/versions/0457_update_categories.py
new file mode 100644
index 0000000000..09bcb96d76
--- /dev/null
+++ b/migrations/versions/0457_update_categories.py
@@ -0,0 +1,29 @@
+"""
+Revision ID: 0457_update_categories
+Revises: 0456_update_template_categories
+Create Date: 2024-06-25 13:32:00
+"""
+from alembic import op
+
+revision = "0457_update_categories"
+down_revision = "0456_update_template_categories"
+
+CAT_ALERT_ID = "1d8ce435-a7e5-431b-aaa2-a418bc4d14f9"
+CAT_DECISION_ID = "e81678c0-4897-4111-b9d0-172f6b595f89"
+
+
+def upgrade():
+ op.execute(
+ "UPDATE template_categories SET email_process_type='normal', sms_process_type='normal' WHERE id = '{}'".format(
+ CAT_ALERT_ID,
+ )
+ )
+ op.execute(
+ "UPDATE template_categories SET email_process_type='bulk', sms_process_type='bulk' WHERE id = '{}'".format(
+ CAT_DECISION_ID,
+ )
+ )
+
+
+def downgrade():
+ pass
diff --git a/mypy.ini b/mypy.ini
index e88f424a3e..95cf428cd6 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -83,4 +83,7 @@ ignore_missing_imports = True
ignore_missing_imports = True
[mypy-simple_salesforce.*]
+ignore_missing_imports = True
+
+[mypy-aws_xray_sdk.*]
ignore_missing_imports = True
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index 8209d2ed6f..9afdaa8ac3 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -208,19 +208,34 @@ files = [
[package.dependencies]
aiohttp = "*"
+[[package]]
+name = "aws-xray-sdk"
+version = "2.14.0"
+description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aws_xray_sdk-2.14.0-py2.py3-none-any.whl", hash = "sha256:cfbe6feea3d26613a2a869d14c9246a844285c97087ad8f296f901633554ad94"},
+ {file = "aws_xray_sdk-2.14.0.tar.gz", hash = "sha256:aab843c331af9ab9ba5cefb3a303832a19db186140894a523edafc024cc0493c"},
+]
+
+[package.dependencies]
+botocore = ">=1.11.3"
+wrapt = "*"
+
[[package]]
name = "awscli"
-version = "1.32.100"
+version = "1.33.5"
description = "Universal Command Line Environment for AWS."
optional = false
python-versions = ">=3.8"
files = [
- {file = "awscli-1.32.100-py3-none-any.whl", hash = "sha256:46e4a44dafeffe63980ab2cd0240aa15a4879cf5d84f210c9eb0facc05e7bf0a"},
- {file = "awscli-1.32.100.tar.gz", hash = "sha256:7bd06388d7853508f96a91291c28b0745ac0a5ac73276cb7db48478d6d3c2a70"},
+ {file = "awscli-1.33.5-py3-none-any.whl", hash = "sha256:46eb5858f154723d3d11900b33035f24b51882758d5f3f753e472ca12375bc46"},
+ {file = "awscli-1.33.5.tar.gz", hash = "sha256:eda29ad39b0907505f78d693e6cc1dc76c2d47a0e5cf5376e86a791d5e830535"},
]
[package.dependencies]
-botocore = "1.34.100"
+botocore = "1.34.123"
colorama = ">=0.2.5,<0.4.7"
docutils = ">=0.10,<0.17"
PyYAML = ">=3.10,<6.1"
@@ -286,33 +301,33 @@ files = [
[[package]]
name = "black"
-version = "23.7.0"
+version = "23.12.1"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
files = [
- {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"},
- {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"},
- {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"},
- {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"},
- {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"},
- {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"},
- {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"},
- {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"},
- {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"},
- {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"},
- {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"},
- {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"},
- {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"},
- {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"},
- {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"},
- {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"},
- {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"},
- {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"},
- {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"},
- {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"},
- {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"},
- {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"},
+ {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"},
+ {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"},
+ {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"},
+ {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"},
+ {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"},
+ {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"},
+ {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"},
+ {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"},
+ {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"},
+ {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"},
+ {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"},
+ {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"},
+ {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"},
+ {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"},
+ {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"},
+ {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"},
+ {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"},
+ {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"},
+ {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"},
+ {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"},
+ {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"},
+ {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"},
]
[package.dependencies]
@@ -322,10 +337,11 @@ packaging = ">=22.0"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
[package.extras]
colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
+d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
@@ -390,13 +406,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.34.100"
+version = "1.34.123"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
- {file = "botocore-1.34.100-py3-none-any.whl", hash = "sha256:ee516fb9e9e906d311f2a9921afaf79c594db239a5b4b626e89e6960401aad0b"},
- {file = "botocore-1.34.100.tar.gz", hash = "sha256:513bea60c6531af8e1ae1fdb2947e3ef99712f39c58f4656b5efef9cb6f75a13"},
+ {file = "botocore-1.34.123-py3-none-any.whl", hash = "sha256:8c34ada2a708c82e7174bff700611643db7ce2cb18f1130c35045c24310d299d"},
+ {file = "botocore-1.34.123.tar.gz", hash = "sha256:a8577f6574600c4d159b5cd103ee05744a443d77f7778304e17307940b369c4f"},
]
[package.dependencies]
@@ -405,7 +421,7 @@ python-dateutil = ">=2.1,<3.0.0"
urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}
[package.extras]
-crt = ["awscrt (==0.20.9)"]
+crt = ["awscrt (==0.20.11)"]
[[package]]
name = "brotli"
@@ -501,13 +517,13 @@ files = [
[[package]]
name = "cachelib"
-version = "0.10.2"
+version = "0.12.0"
description = "A collection of cache libraries in the same API interface."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "cachelib-0.10.2-py3-none-any.whl", hash = "sha256:42d49f2fad9310dd946d7be73d46776bcd4d5fde4f49ad210cfdd447fbdfc346"},
- {file = "cachelib-0.10.2.tar.gz", hash = "sha256:593faeee62a7c037d50fc835617a01b887503f972fb52b188ae7e50e9cb69740"},
+ {file = "cachelib-0.12.0-py3-none-any.whl", hash = "sha256:038f4d855afc3eb8caab10458f6eac55c328911f9055824c22c2f259ef9ed3a3"},
+ {file = "cachelib-0.12.0.tar.gz", hash = "sha256:8243029a028436fd23229113dee517c0700bb43a8a289ec5a963e4af9ca2b194"},
]
[[package]]
@@ -595,75 +611,63 @@ files = [
[[package]]
name = "cffi"
-version = "1.15.1"
+version = "1.16.0"
description = "Foreign Function Interface for Python calling C code."
optional = false
-python-versions = "*"
+python-versions = ">=3.8"
files = [
- {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
- {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
- {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
- {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
- {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
- {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
- {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
- {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
- {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
- {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
- {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
- {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
- {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
- {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
- {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
- {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
- {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
- {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
- {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
- {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
- {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
- {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
- {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
- {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
- {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
- {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
- {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
- {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
- {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
- {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
- {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
- {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
- {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
- {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
- {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
- {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
- {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
- {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
- {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
- {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
- {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
- {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
- {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
- {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
- {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
- {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
- {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
- {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
- {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
- {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
- {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
- {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
- {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
- {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
- {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
- {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
- {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
- {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
- {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
- {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
- {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
- {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
- {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
- {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
+ {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
+ {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
+ {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
+ {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
+ {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
+ {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
+ {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
+ {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
+ {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
+ {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
+ {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
+ {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
+ {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
+ {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
+ {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
]
[package.dependencies]
@@ -1155,19 +1159,6 @@ Werkzeug = ">=2.3.7"
async = ["asgiref (>=3.2)"]
dotenv = ["python-dotenv"]
-[[package]]
-name = "flask-basicauth"
-version = "0.2.0"
-description = "HTTP basic access authentication for Flask."
-optional = false
-python-versions = "*"
-files = [
- {file = "Flask-BasicAuth-0.2.0.tar.gz", hash = "sha256:df5ebd489dc0914c224419da059d991eb72988a01cdd4b956d52932ce7d501ff"},
-]
-
-[package.dependencies]
-Flask = "*"
-
[[package]]
name = "flask-bcrypt"
version = "1.0.1"
@@ -1197,6 +1188,21 @@ files = [
[package.dependencies]
Flask = ">=0.9"
+[[package]]
+name = "flask-login"
+version = "0.6.3"
+description = "User authentication and session management for Flask."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333"},
+ {file = "Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d"},
+]
+
+[package.dependencies]
+Flask = ">=1.0.4"
+Werkzeug = ">=1.0.1"
+
[[package]]
name = "flask-marshmallow"
version = "0.14.0"
@@ -1276,13 +1282,13 @@ resolved_reference = "500e732dd1b975a56ab06a46bd1a20a21e682262"
[[package]]
name = "freezegun"
-version = "1.2.2"
+version = "1.4.0"
description = "Let your Python tests travel through time"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"},
- {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"},
+ {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"},
+ {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"},
]
[package.dependencies]
@@ -1674,13 +1680,13 @@ files = [
[[package]]
name = "iso8601"
-version = "2.0.0"
+version = "2.1.0"
description = "Simple module to parse ISO 8601 dates"
optional = false
python-versions = ">=3.7,<4.0"
files = [
- {file = "iso8601-2.0.0-py3-none-any.whl", hash = "sha256:ebe10061b932edb8a8e33cc635d661926c59b9c3bed7a4f4edca8c62d400af10"},
- {file = "iso8601-2.0.0.tar.gz", hash = "sha256:739960d37c74c77bd9bd546a76562ccb581fe3d4820ff5c3141eb49c839fda8f"},
+ {file = "iso8601-2.1.0-py3-none-any.whl", hash = "sha256:aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242"},
+ {file = "iso8601-2.1.0.tar.gz", hash = "sha256:6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df"},
]
[[package]]
@@ -1699,30 +1705,27 @@ six = "*"
[[package]]
name = "isort"
-version = "5.12.0"
+version = "5.13.2"
description = "A Python utility / library to sort Python imports."
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
- {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
+ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
+ {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
]
[package.extras]
-colors = ["colorama (>=0.4.3)"]
-pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
-plugins = ["setuptools"]
-requirements-deprecated-finder = ["pip-api", "pipreqs"]
+colors = ["colorama (>=0.4.6)"]
[[package]]
name = "itsdangerous"
-version = "2.1.2"
+version = "2.2.0"
description = "Safely pass data to untrusted environments and back."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
- {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
+ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
+ {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
]
[[package]]
@@ -1832,29 +1835,28 @@ zookeeper = ["kazoo (>=2.8.0)"]
[[package]]
name = "locust"
-version = "2.16.1"
+version = "2.23.1"
description = "Developer friendly load testing framework"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "locust-2.16.1-py3-none-any.whl", hash = "sha256:d0f01f9fca6a7d9be987b32185799d9e219fce3b9a3b8250ea03e88003335804"},
- {file = "locust-2.16.1.tar.gz", hash = "sha256:cd54f179b679ae927e9b3ffd2b6a7c89c1078103cfbe96b4dd53c7872774b619"},
+ {file = "locust-2.23.1-py3-none-any.whl", hash = "sha256:96013a460a4b4d6d4fd46c70e6ff1fd2b6e03b48ddb1b48d1513d3134ba2cecf"},
+ {file = "locust-2.23.1.tar.gz", hash = "sha256:6cc729729e5ebf5852fc9d845302cfcf0ab0132f198e68b3eb0c88b438b6a863"},
]
[package.dependencies]
-ConfigArgParse = ">=1.0"
+ConfigArgParse = ">=1.5.5"
flask = ">=2.0.0"
-Flask-BasicAuth = ">=0.2.0"
Flask-Cors = ">=3.0.10"
-gevent = ">=20.12.1"
-geventhttpclient = ">=2.0.2"
-msgpack = ">=0.6.2"
-psutil = ">=5.6.7"
+Flask-Login = ">=0.6.3"
+gevent = ">=22.10.2"
+geventhttpclient = ">=2.0.11"
+msgpack = ">=1.0.0"
+psutil = ">=5.9.1"
pywin32 = {version = "*", markers = "platform_system == \"Windows\""}
-pyzmq = ">=22.2.1,<23.0.0 || >23.0.0"
-requests = ">=2.23.0"
+pyzmq = ">=25.0.0"
+requests = ">=2.26.0"
roundrobin = ">=0.0.2"
-typing-extensions = ">=3.7.4.3"
Werkzeug = ">=2.0.0"
[[package]]
@@ -2054,22 +2056,21 @@ files = [
[[package]]
name = "marshmallow"
-version = "3.20.2"
+version = "3.21.0"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
python-versions = ">=3.8"
files = [
- {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"},
- {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"},
+ {file = "marshmallow-3.21.0-py3-none-any.whl", hash = "sha256:e7997f83571c7fd476042c2c188e4ee8a78900ca5e74bd9c8097afa56624e9bd"},
+ {file = "marshmallow-3.21.0.tar.gz", hash = "sha256:20f53be28c6e374a711a16165fb22a8dc6003e3f7cda1285e3ca777b9193885b"},
]
[package.dependencies]
packaging = ">=17.0"
[package.extras]
-dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"]
-docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
-lint = ["pre-commit (>=2.4,<4.0)"]
+dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"]
+docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"]
tests = ["pytest", "pytz", "simplejson"]
[[package]]
@@ -2129,13 +2130,13 @@ files = [
[[package]]
name = "moto"
-version = "4.1.11"
+version = "4.2.14"
description = ""
optional = false
python-versions = ">=3.7"
files = [
- {file = "moto-4.1.11-py2.py3-none-any.whl", hash = "sha256:5003126c46ce70fe351ff1cb67dc8d9a5983f403fae13b7628b0fb503d19039e"},
- {file = "moto-4.1.11.tar.gz", hash = "sha256:f3e966ba1460751e19eab5356545813b29c05478b47eb0da445d688949339be2"},
+ {file = "moto-4.2.14-py2.py3-none-any.whl", hash = "sha256:6d242dbbabe925bb385ddb6958449e5c827670b13b8e153ed63f91dbdb50372c"},
+ {file = "moto-4.2.14.tar.gz", hash = "sha256:8f9263ca70b646f091edcc93e97cda864a542e6d16ed04066b1370ed217bd190"},
]
[package.dependencies]
@@ -2150,26 +2151,24 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1"
xmltodict = "*"
[package.extras]
-all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.3)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
-apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.2.8)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
+all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
apigatewayv2 = ["PyYAML (>=5.1)"]
appsync = ["graphql-core"]
awslambda = ["docker (>=3.0.0)"]
batch = ["docker (>=3.0.0)"]
-cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.3)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
-ds = ["sshpubkeys (>=3.1.0)"]
-dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.3)"]
-dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.3)"]
-ebs = ["sshpubkeys (>=3.1.0)"]
+dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"]
+dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"]
ec2 = ["sshpubkeys (>=3.1.0)"]
-efs = ["sshpubkeys (>=3.1.0)"]
-eks = ["sshpubkeys (>=3.1.0)"]
glue = ["pyparsing (>=3.0.7)"]
iotdata = ["jsondiff (>=1.1.2)"]
-route53resolver = ["sshpubkeys (>=3.1.0)"]
-s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.3.3)"]
-server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.3)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
+s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.0)"]
+s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.0)"]
+server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
ssm = ["PyYAML (>=5.1)"]
xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"]
@@ -2404,28 +2403,30 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"]
[[package]]
name = "newrelic"
-version = "6.10.0.165"
+version = "8.10.0"
description = "New Relic Python Agent"
optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*"
-files = [
- {file = "newrelic-6.10.0.165-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:82815f47049ee34544b035a060711fccbd75cf614b13244873ee2bd1285f2ca7"},
- {file = "newrelic-6.10.0.165-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:092c1c3411ebc3ec7bb7c197a91822076b34d332e23272474ea8c4199d8f75ff"},
- {file = "newrelic-6.10.0.165-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:32bebc4f2dd5f098e20b7f2d02e21a8d7ab200d448b08ad984811da1228f5981"},
- {file = "newrelic-6.10.0.165-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:cc69de0324686e4a809b89ddf24a019fba6a72e12e6b8a308f9820e376321e3e"},
- {file = "newrelic-6.10.0.165-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a29c3a09eeeada34b178dafe870f6fbcad0c2572fb7aa0d69e57b9bb80c321ff"},
- {file = "newrelic-6.10.0.165-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57670136b514972a9918d828a00ee73e7f09f4cd43b7a98ba5e4810848e1b35e"},
- {file = "newrelic-6.10.0.165-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4702b96e8f0e46c385445c80c16e081620e3d0d6d05443779ed6c1a484dfc8ff"},
- {file = "newrelic-6.10.0.165-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:66091fb10e5d05937b610c2be50562beaa5b55d92b323285fbe4e34a463424ac"},
- {file = "newrelic-6.10.0.165-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78fbd336ca53c3ec35656d6b8a0df1e2bb1776e95aa1bc010da23ea2e4a21554"},
- {file = "newrelic-6.10.0.165-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e50e4072547c1e61d03b2f82a4b333d8f8416bcc3d18bb0526ae7c6ff40d395"},
- {file = "newrelic-6.10.0.165-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f88eac897881b737b13413e8f8337b4221315cd2c86ff973f126f6d075407cc"},
- {file = "newrelic-6.10.0.165-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a22a38268c45f55f84cf8c21f8c5c259e23402592db6d2c6c05b4e4de8bb3ad4"},
- {file = "newrelic-6.10.0.165.tar.gz", hash = "sha256:17743407935e75375342ced2fd380244c41001394a0fac92efb3038166cc60f1"},
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+files = [
+ {file = "newrelic-8.10.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:cf3b67327e64d2b50aec855821199b2bc46bc0c2d142df269d420748dd49b31b"},
+ {file = "newrelic-8.10.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9601d886669fe1e0c23bbf91fb68ab23086011816ba96c6dd714c60dc0a74088"},
+ {file = "newrelic-8.10.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:55a64d2abadf69bbc7bb01178332c4f25247689a97b01a62125d162ea7ec8974"},
+ {file = "newrelic-8.10.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:b6cddd869ac8f7f32f6de8212ae878a21c9e63f2183601d239a76d38c5d5a366"},
+ {file = "newrelic-8.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9af0130e1f1ca032c606d15a6d5558d27273a063b7c53702218b3beccd50b23"},
+ {file = "newrelic-8.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2fd24b32dbf510e4e3fe40b71ad395dd73a4bb9f5eaf59eb5ff22ed76ba2d41"},
+ {file = "newrelic-8.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2567ba9e29fd7b9f4c23cf16a5a149097eb0e5da587734c5a40732d75aaec189"},
+ {file = "newrelic-8.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9c9f7842234a51e4a2fdafe42c42ebe0b6b1966279f2f91ec8a9c16480c2236"},
+ {file = "newrelic-8.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:365d3b1a10d1021217beeb28a93c1356a9feb94bd24f02972691dc71227e40dc"},
+ {file = "newrelic-8.10.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd0666557419dbe11b04e3b38480b3113b3c4670d42619420d60352a1956dd8"},
+ {file = "newrelic-8.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722072d57e2d416de68b650235878583a2a8809ea39c7dd5c8c11a19089b7665"},
+ {file = "newrelic-8.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbda843100c99ac3291701c0a70fedb705c0b0707800c60b93657d3985aae357"},
+ {file = "newrelic-8.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ed36fb91f152128825459eae9a52da364352ea95bcd78b405b0a5b8057b2ed7"},
+ {file = "newrelic-8.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc975c29548e25805ead794d9de7ab3cb8ba4a6a106098646e1ab03112d1432e"},
+ {file = "newrelic-8.10.0.tar.gz", hash = "sha256:8a2271b76ea684a63936302579d6085d46a2b54042cb91dc9b0d71a0cd4dd38b"},
]
[package.extras]
-infinite-tracing = ["grpcio (<2)", "protobuf (<4)"]
+infinite-tracing = ["grpcio", "protobuf"]
[[package]]
name = "notifications-python-client"
@@ -2444,7 +2445,7 @@ requests = ">=2.0.0"
[[package]]
name = "notifications-utils"
-version = "52.2.4"
+version = "52.2.7"
description = "Shared python code for Notification - Provides logging utils etc."
optional = false
python-versions = "~3.10.9"
@@ -2452,7 +2453,7 @@ files = []
develop = false
[package.dependencies]
-awscli = "1.32.100"
+awscli = "1.33.5"
bleach = "6.1.0"
boto3 = "1.34.100"
cachetools = "4.2.4"
@@ -2460,7 +2461,7 @@ certifi = "^2023.7.22"
cryptography = "^42.0.3"
Flask = "2.3.3"
Flask-Redis = "0.4.0"
-itsdangerous = "2.1.2"
+itsdangerous = "2.2.0"
Jinja2 = "^3.0.0"
markupsafe = "2.1.5"
mistune = "0.8.4"
@@ -2474,13 +2475,13 @@ PyYAML = "6.0.1"
requests = "2.31.0"
smartypants = "2.0.1"
statsd = "3.3.0"
-werkzeug = "2.3.7"
+werkzeug = "3.0.3"
[package.source]
type = "git"
url = "https://github.com/cds-snc/notifier-utils.git"
-reference = "52.2.4"
-resolved_reference = "1e2c279333ee1b86671b82d8f562bb3e98446500"
+reference = "52.2.7"
+resolved_reference = "cd8943a30aa75f657951716111ff68df737b0fff"
[[package]]
name = "ordered-set"
@@ -3052,13 +3053,13 @@ pytest = ">=3.10"
[[package]]
name = "pytest-mock"
-version = "3.11.1"
+version = "3.12.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"},
- {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"},
+ {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
+ {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
]
[package.dependencies]
@@ -3069,13 +3070,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"]
[[package]]
name = "pytest-mock-resources"
-version = "2.9.2"
+version = "2.10.0"
description = "A pytest plugin for easily instantiating reproducible mock resources."
optional = false
python-versions = ">=3.7,<4"
files = [
- {file = "pytest_mock_resources-2.9.2-py3-none-any.whl", hash = "sha256:31a585e2fcf96303a98586f7024e4db8f668d8f5a9310cd79c554fd9f5687d49"},
- {file = "pytest_mock_resources-2.9.2.tar.gz", hash = "sha256:3b65808f1e8f01233038768c522fca3de65b01d7bc4b077ac3aa28ce41da04d3"},
+ {file = "pytest_mock_resources-2.10.0-py3-none-any.whl", hash = "sha256:f67eccc92d645328cc4bf532c9b08ff837b0f60a878d230c4ae4efc17f73a160"},
+ {file = "pytest_mock_resources-2.10.0.tar.gz", hash = "sha256:acea4edd98de70d56e0949d8e5ab2f75d412b3a0f61437252284146f931f4e09"},
]
[package.dependencies]
@@ -3732,13 +3733,13 @@ files = [
[[package]]
name = "tldextract"
-version = "3.4.4"
+version = "3.5.0"
description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well."
optional = false
python-versions = ">=3.7"
files = [
- {file = "tldextract-3.4.4-py3-none-any.whl", hash = "sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2"},
- {file = "tldextract-3.4.4.tar.gz", hash = "sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234"},
+ {file = "tldextract-3.5.0-py3-none-any.whl", hash = "sha256:2cb271ca8d06ea1630a1361b58edad14e0cf81f34ce3c90b052854528fe2a281"},
+ {file = "tldextract-3.5.0.tar.gz", hash = "sha256:4df1c65b95be61d59428e8611e955e54e6f1d4483d3e8d5733d3a9062155e910"},
]
[package.dependencies]
@@ -3812,13 +3813,13 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.
[[package]]
name = "types-boto"
-version = "2.49.18.20240205"
+version = "2.49.18.9"
description = "Typing stubs for boto"
optional = false
-python-versions = ">=3.8"
+python-versions = "*"
files = [
- {file = "types-boto-2.49.18.20240205.tar.gz", hash = "sha256:6c7f3945e5759e1f8a760e2843adbeb1eea64f869f3a3070af7cfcfc25ea71bd"},
- {file = "types_boto-2.49.18.20240205-py3-none-any.whl", hash = "sha256:9873214ce37756a6145c165fb9beaf80cb4ac1df5a5967f6a0945109c8c4469a"},
+ {file = "types-boto-2.49.18.9.tar.gz", hash = "sha256:fe711d938c237be50346a1bdc2231d3170453fe734789075dd088458e4e9442d"},
+ {file = "types_boto-2.49.18.9-py3-none-any.whl", hash = "sha256:b44e8aead5e34bc336a813af90fdbb9ac5bb1091de839042628163463d9948eb"},
]
[[package]]
@@ -3899,13 +3900,13 @@ urllib3 = ">=2"
[[package]]
name = "typing-extensions"
-version = "4.7.1"
-description = "Backported and Experimental Type Hints for Python 3.7+"
+version = "4.10.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"},
- {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
+ {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
+ {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
]
[[package]]
@@ -3982,13 +3983,13 @@ files = [
[[package]]
name = "werkzeug"
-version = "2.3.7"
+version = "3.0.3"
description = "The comprehensive WSGI web application library."
optional = false
python-versions = ">=3.8"
files = [
- {file = "werkzeug-2.3.7-py3-none-any.whl", hash = "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"},
- {file = "werkzeug-2.3.7.tar.gz", hash = "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"},
+ {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"},
+ {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"},
]
[package.dependencies]
@@ -3997,6 +3998,85 @@ MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog (>=2.3)"]
+[[package]]
+name = "wrapt"
+version = "1.16.0"
+description = "Module for decorators, wrappers and monkey patching."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
+ {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
+ {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
+ {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
+ {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
+ {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
+ {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
+ {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
+ {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
+ {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
+ {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
+ {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
+ {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
+ {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
+ {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
+ {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
+ {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
+ {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
+ {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
+ {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
+ {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
+ {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
+ {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
+ {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
+ {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
+ {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
+ {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
+ {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
+ {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
+ {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
+ {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
+ {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
+ {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
+ {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
+ {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
+ {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
+ {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
+ {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
+ {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
+ {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
+ {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
+ {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
+ {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
+ {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
+ {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
+ {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
+ {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
+ {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
+ {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
+ {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
+ {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
+ {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
+ {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
+ {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
+ {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
+ {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
+ {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
+ {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
+ {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
+ {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
+ {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
+ {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
+ {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
+ {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
+ {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
+ {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
+ {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
+ {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
+ {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
+ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
+]
+
[[package]]
name = "xmltodict"
version = "0.13.0"
@@ -4212,4 +4292,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = "~3.10.9"
-content-hash = "62653f4a581d32ac1678c8454f1100320096e8166aca1599cffd6fd3f72cfb4b"
+content-hash = "d95fee17c6e12a5dbec8ad0bdb8e256aadee291d2c2306c4b4f11e3db07fe006"
diff --git a/pyproject.toml b/pyproject.toml
index 9844f1c756..e2dcebd99b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,36 +23,37 @@ build-backend = "poetry.core.masonry.api"
python = "~3.10.9"
apig-wsgi = "2.18.0"
boto = "2.49.0"
-cffi = "1.15.1"
+cffi = "1.16.0"
celery = {extras = ["sqs"], version = "5.3.6"}
docopt = "0.6.2"
environs = "9.5.0" # pyup: <9.3.3 # marshmallow v3 throws errors"
fido2 = "0.9.3"
Flask-Bcrypt = "1.0.1"
-flask-marshmallow = "0.14.0"
Flask-Migrate = "2.7.0"
Flask-SQLAlchemy = { git = "https://github.com/pallets-eco/flask-sqlalchemy.git", rev = "500e732dd1b975a56ab06a46bd1a20a21e682262"}
#git+https://github.com/mitsuhiko/flask-sqlalchemy.git@500e732dd1b975a56ab06a46bd1a20a21e682262#egg=Flask-SQLAlchemy==2.3.2.dev20190108
Flask = "2.3.3"
click-datetime = "0.2"
gevent = "23.9.1"
+
gunicorn = "20.1.0"
-iso8601 = "2.0.0"
+iso8601 = "2.1.0"
jsonschema = "3.2.0"
marshmallow-sqlalchemy = "0.29.0"
-marshmallow = "3.20.2"
+marshmallow = "3.21.0"
python-magic = "0.4.27"
psycopg2-binary = "2.9.9"
PyJWT = "2.8.0"
pytz = "2021.3"
PyYAML = "6.0.1"
+
+cachelib = "0.12.0"
SQLAlchemy = "1.4.52"
-cachelib = "0.10.2"
-newrelic = "6.10.0.165"
+newrelic = "8.10.0"
notifications-python-client = "6.4.1"
python-dotenv = "1.0.1"
pwnedpasswords = "2.0.0"
-tldextract = "3.4.4"
+tldextract = "3.5.0"
nanoid = "2.0.0"
unidecode = "1.3.8"
more-itertools = "8.14.0"
@@ -60,46 +61,49 @@ more-itertools = "8.14.0"
awscli-cwlogs = "1.4.6"
aws-embedded-metrics = "1.0.8"
# Putting upgrade on hold due to new version introducing breaking changes
-Werkzeug = "2.3.7"
+Werkzeug = "3.0.3"
MarkupSafe = "2.1.5"
# REVIEW: v2 is using sha512 instead of sha1 by default (in v1)
-itsdangerous = "2.1.2"
-notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.2.4" }
+itsdangerous = "2.2.0"
+notifications-utils = { git = "https://github.com/cds-snc/notifier-utils.git", tag = "52.2.7" }
+
# rsa = "4.9 # awscli 1.22.38 depends on rsa<4.8
-typing-extensions = "4.7.1"
+typing-extensions = "4.10.0"
greenlet = "2.0.2"
simple-salesforce = "^1.12.3"
# Pinned dependencies
certifi = "^2023.7.22" # pinned for security reasons: https://github.com/cds-snc/notification-api/security/dependabot/119
idna = "2.10" # pinned to align with test moto dependency requirements (for <=2.9)
+flask-marshmallow = "0.14.0"
+aws-xray-sdk = "^2.14.0"
[tool.poetry.group.test.dependencies]
flake8 = "6.1.0"
-isort = "5.12.0"
-moto = "4.1.11"
+isort = "5.13.2"
+moto = "4.2.14"
idna = "2.10"
pytest = "7.4.4"
pytest-env = "0.8.2"
-pytest-mock = "3.11.1"
+pytest-mock = "3.12.0"
pytest-cov = "3.0.0"
coveralls = "3.3.1"
pytest-xdist = "2.5.0"
-freezegun = "1.2.2"
+freezegun = "1.4.0"
requests-mock = "1.11.0"
# optional requirements for jsonschema
strict-rfc3339 = "0.7"
rfc3987 = "1.3.8"
# used for creating manifest file locally
jinja2-cli = { extras = ["yaml"], version = "0.8.2" }
-black = "23.7.0"
-locust = "2.16.1"
+black = "23.12.1"
+locust = "2.23.1"
mypy = "1.5"
sqlalchemy-stubs = "0.4"
sqlalchemy2-stubs = "0.0.2a38"
networkx = "2.8.8" # not directly required, pinned by Snyk to avoid a vulnerability
-pytest-mock-resources = { extras = ["redis"], version = "2.9.2" }
-types-boto = "2.49.18.20240205"
+pytest-mock-resources = { extras = ["redis"], version = "2.10.0" }
+types-boto = "2.49.18.9"
types-mock = "4.0.15.2"
types-python-dateutil = "2.8.19.20240106"
types-pytz = "2022.7.1.2"
diff --git a/scripts/callManifestsRollout.sh b/scripts/callManifestsRollout.sh
deleted file mode 100755
index 29229ea093..0000000000
--- a/scripts/callManifestsRollout.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-GITHUB_SHA=$1
-PAYLOAD="{\"ref\":\"main\",\"inputs\":{\"docker_sha\":\"$GITHUB_SHA\"}}"
-
-
-RESPONSE=$(curl -w '%{http_code}\n' \
- -o /dev/null -s \
- -L -X POST -H "Accept: application/vnd.github+json" \
- -H "Authorization: Bearer $WORKFLOW_PAT" \
- -H "X-GitHub-Api-Version: 2022-11-28" \
- https://api.github.com/repos/cds-snc/notification-manifests/actions/workflows/api-rollout-k8s-staging.yaml/dispatches \
- -d "$PAYLOAD")
-
-if [ "$RESPONSE" != 204 ]; then
- echo "ERROR CALLING MANIFESTS ROLLOUT: HTTP RESPONSE: $RESPONSE"
- exit 1
-fi
diff --git a/tests/app/clients/test_aws_pinpoint.py b/tests/app/clients/test_aws_pinpoint.py
index ad7546d1ad..b913b1c39b 100644
--- a/tests/app/clients/test_aws_pinpoint.py
+++ b/tests/app/clients/test_aws_pinpoint.py
@@ -1,11 +1,13 @@
import pytest
from app import aws_pinpoint_client
+from app.clients.sms import SmsSendingVehicles
from tests.conftest import set_config_values
@pytest.mark.serial
-def test_send_sms_sends_to_default_pool(notify_api, mocker, sample_template):
+@pytest.mark.parametrize("template_id", [None, "uuid"])
+def test_send_sms_sends_to_default_pool(notify_api, mocker, sample_template, template_id):
boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True)
mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True)
to = "6135555555"
@@ -21,7 +23,7 @@ def test_send_sms_sends_to_default_pool(notify_api, mocker, sample_template):
"AWS_PINPOINT_SC_TEMPLATE_IDS": [],
},
):
- aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id)
+ aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=template_id)
boto_mock.send_text_message.assert_called_once_with(
DestinationPhoneNumber="+16135555555",
@@ -33,7 +35,7 @@ def test_send_sms_sends_to_default_pool(notify_api, mocker, sample_template):
@pytest.mark.serial
-def test_send_sms_sends_to_shortcode_pool(notify_api, mocker, sample_template):
+def test_send_sms_sends_sc_template_to_shortcode_pool_with_ff_false(notify_api, mocker, sample_template):
boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True)
mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True)
to = "6135555555"
@@ -47,6 +49,7 @@ def test_send_sms_sends_to_shortcode_pool(notify_api, mocker, sample_template):
"AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
"AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name",
"AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sample_template.id)],
+ "FF_TEMPLATE_CATEGORY": False,
},
):
aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id)
@@ -60,6 +63,34 @@ def test_send_sms_sends_to_shortcode_pool(notify_api, mocker, sample_template):
)
+@pytest.mark.serial
+def test_send_sms_sends_notify_sms_to_shortcode_pool(notify_api, mocker, sample_template):
+ boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True)
+ mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True)
+ to = "6135555555"
+ content = "foo"
+ reference = "ref"
+ with set_config_values(
+ notify_api,
+ {
+ "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id",
+ "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
+ "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name",
+ "AWS_PINPOINT_SC_TEMPLATE_IDS": [str(sample_template.id)],
+ "NOTIFY_SERVICE_ID": "notify",
+ },
+ ):
+ aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id, service_id="notify")
+
+ boto_mock.send_text_message.assert_called_once_with(
+ DestinationPhoneNumber="+16135555555",
+ OriginationIdentity="sc_pool_id",
+ MessageBody=content,
+ MessageType="TRANSACTIONAL",
+ ConfigurationSetName="config_set_name",
+ )
+
+
def test_send_sms_returns_raises_error_if_there_is_no_valid_number_is_found(notify_api, mocker):
mocker.patch.object(aws_pinpoint_client, "_client", create=True)
mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True)
@@ -71,3 +102,87 @@ def test_send_sms_returns_raises_error_if_there_is_no_valid_number_is_found(noti
aws_pinpoint_client.send_sms(to, content, reference)
assert "No valid numbers found for SMS delivery" in str(excinfo.value)
+
+
+def test_handles_opted_out_numbers(notify_api, mocker, sample_template):
+ conflict_error = aws_pinpoint_client._client.exceptions.ConflictException(
+ error_response={"Reason": "DESTINATION_PHONE_NUMBER_OPTED_OUT"}, operation_name="send_text_message"
+ )
+ mocker.patch("app.aws_pinpoint_client._client.send_text_message", side_effect=conflict_error)
+
+ to = "6135555555"
+ content = "foo"
+ reference = "ref"
+ assert aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id) == "opted_out"
+
+
+@pytest.mark.serial
+@pytest.mark.parametrize(
+ "FF_TEMPLATE_CATEGORY, sending_vehicle, expected_pool",
+ [
+ (False, None, "default_pool_id"),
+ (False, "long_code", "default_pool_id"),
+ (False, "short_code", "default_pool_id"),
+ (True, None, "default_pool_id"),
+ (True, "long_code", "default_pool_id"),
+ (True, "short_code", "sc_pool_id"),
+ ],
+)
+def test_respects_sending_vehicle_if_FF_enabled(
+ notify_api, mocker, sample_template, FF_TEMPLATE_CATEGORY, sending_vehicle, expected_pool
+):
+ boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True)
+ mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True)
+ to = "6135555555"
+ content = "foo"
+ reference = "ref"
+ sms_sending_vehicle = None if sending_vehicle is None else SmsSendingVehicles(sending_vehicle)
+
+ with set_config_values(
+ notify_api,
+ {
+ "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id",
+ "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
+ "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name",
+ "AWS_PINPOINT_SC_TEMPLATE_IDS": [],
+ "FF_TEMPLATE_CATEGORY": FF_TEMPLATE_CATEGORY,
+ },
+ ):
+ aws_pinpoint_client.send_sms(
+ to, content, reference=reference, template_id=sample_template.id, sending_vehicle=sms_sending_vehicle
+ )
+
+ boto_mock.send_text_message.assert_called_once_with(
+ DestinationPhoneNumber="+16135555555",
+ OriginationIdentity=expected_pool,
+ MessageBody=content,
+ MessageType="TRANSACTIONAL",
+ ConfigurationSetName="config_set_name",
+ )
+
+
+@pytest.mark.serial
+def test_send_sms_sends_international_without_pool_id(notify_api, mocker, sample_template):
+ boto_mock = mocker.patch.object(aws_pinpoint_client, "_client", create=True)
+ mocker.patch.object(aws_pinpoint_client, "statsd_client", create=True)
+ to = "+447512501324"
+ content = "foo"
+ reference = "ref"
+
+ with set_config_values(
+ notify_api,
+ {
+ "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id",
+ "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
+ "AWS_PINPOINT_CONFIGURATION_SET_NAME": "config_set_name",
+ "AWS_PINPOINT_SC_TEMPLATE_IDS": [],
+ },
+ ):
+ aws_pinpoint_client.send_sms(to, content, reference=reference, template_id=sample_template.id)
+
+ boto_mock.send_text_message.assert_called_once_with(
+ DestinationPhoneNumber="+447512501324",
+ MessageBody=content,
+ MessageType="TRANSACTIONAL",
+ ConfigurationSetName="config_set_name",
+ )
diff --git a/tests/app/clients/test_freshdesk.py b/tests/app/clients/test_freshdesk.py
index 3e8b56227d..8ccbdae256 100644
--- a/tests/app/clients/test_freshdesk.py
+++ b/tests/app/clients/test_freshdesk.py
@@ -181,6 +181,54 @@ def match_json(request):
assert response == 201
assert email_freshdesk_ticket_mock.not_called()
+ def test_send_ticket_other_category(self, email_freshdesk_ticket_mock, notify_api: Flask):
+ def match_json(request):
+ expected = {
+ "product_id": 42,
+ "subject": "New template category request",
+ "description": "New template category request from name (test@email.com):
"
+ "- Service id: 8624bd36-b70b-4d4b-a459-13e1f4770b92
"
+ "- New Template Category Request name: test category name
"
+ "- Template id request: http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92/templates/3ed1f07a-1b20-4f83-9a3e-158ab9b00103
"
+ "
"
+ "Demande de nouvelle catégorie de modèle de name (test@email.com):
"
+ "- Identifiant du service: 8624bd36-b70b-4d4b-a459-13e1f4770b92
"
+ "- Nom de la nouvelle catégorie de modèle demandée: test category name
"
+ "- Demande d'identifiant de modèle: http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92/templates/3ed1f07a-1b20-4f83-9a3e-158ab9b00103",
+ "email": "test@email.com",
+ "priority": 1,
+ "status": 2,
+ "tags": [],
+ }
+
+ encoded_auth = base64.b64encode(b"freshdesk-api-key:x").decode("ascii")
+ json_matches = request.json() == expected
+ basic_auth_header = request.headers.get("Authorization") == f"Basic {encoded_auth}"
+
+ return json_matches and basic_auth_header
+
+ with requests_mock.mock() as rmock:
+ rmock.request(
+ "POST",
+ "https://freshdesk-test.com/api/v2/tickets",
+ additional_matcher=match_json,
+ status_code=201,
+ )
+ data: Dict[str, Any] = {
+ "email_address": "test@email.com",
+ "name": "name",
+ "friendly_support_type": "New template category request",
+ "support_type": "new_template_category_request",
+ "service_id": "8624bd36-b70b-4d4b-a459-13e1f4770b92",
+ "template_category_name_en": "test category name",
+ "template_category_name_fr": "test category name",
+ "template_id_link": "http://localhost:6012/services/8624bd36-b70b-4d4b-a459-13e1f4770b92/templates/3ed1f07a-1b20-4f83-9a3e-158ab9b00103",
+ }
+ with notify_api.app_context():
+ response = freshdesk.Freshdesk(ContactRequest(**data)).send_ticket()
+ assert response == 201
+ assert email_freshdesk_ticket_mock.not_called()
+
def test_send_ticket_other(self, email_freshdesk_ticket_mock, notify_api: Flask):
def match_json(request):
expected = {
diff --git a/tests/app/commands/test_performance_platform_commands.py b/tests/app/commands/test_performance_platform_commands.py
index 4d255415e0..0ec22fc14a 100644
--- a/tests/app/commands/test_performance_platform_commands.py
+++ b/tests/app/commands/test_performance_platform_commands.py
@@ -1,11 +1,14 @@
from datetime import datetime
-from app.commands import backfill_performance_platform_totals, backfill_processing_time
+from app.commands.deprecated import (
+ backfill_performance_platform_totals,
+ backfill_processing_time,
+)
# This test assumes the local timezone is EST
def test_backfill_processing_time_works_for_correct_dates(mocker, notify_api):
- send_mock = mocker.patch("app.commands.send_processing_time_for_start_and_end")
+ send_mock = mocker.patch("app.commands.deprecated.send_processing_time_for_start_and_end")
# backfill_processing_time is a click.Command object - if you try invoking the callback on its own, it
# throws a `RuntimeError: There is no active click context.` - so get at the original function using __wrapped__
@@ -18,7 +21,7 @@ def test_backfill_processing_time_works_for_correct_dates(mocker, notify_api):
def test_backfill_totals_works_for_correct_dates(mocker, notify_api):
- send_mock = mocker.patch("app.commands.send_total_sent_notifications_to_performance_platform")
+ send_mock = mocker.patch("app.commands.deprecated.send_total_sent_notifications_to_performance_platform")
# backfill_processing_time is a click.Command object - if you try invoking the callback on its own, it
# throws a `RuntimeError: There is no active click context.` - so get at the original function using __wrapped__
diff --git a/tests/app/conftest.py b/tests/app/conftest.py
index 3088d1e88e..a0b428eed0 100644
--- a/tests/app/conftest.py
+++ b/tests/app/conftest.py
@@ -21,6 +21,7 @@
from app.dao.organisation_dao import dao_create_organisation
from app.dao.provider_rates_dao import create_provider_rates
from app.dao.services_dao import dao_add_user_to_service, dao_create_service
+from app.dao.template_categories_dao import dao_create_template_category
from app.dao.templates_dao import dao_create_template
from app.dao.users_dao import create_secret_code, create_user_code
from app.history_meta import create_history
@@ -52,6 +53,7 @@
ServiceEmailReplyTo,
ServiceSafelist,
Template,
+ TemplateCategory,
TemplateHistory,
)
from tests import create_authorization_header
@@ -230,6 +232,98 @@ def _sample_service_custom_letter_contact_block(sample_service):
return sample_service
+@pytest.fixture(scope="function")
+def sample_template_category_with_templates(notify_db, notify_db_session, sample_template_category):
+ create_sample_template(notify_db, notify_db_session, template_category=sample_template_category)
+ create_sample_template(notify_db, notify_db_session, template_category=sample_template_category)
+ return sample_template_category
+
+
+@pytest.fixture(scope="function")
+def populate_generic_categories(notify_db_session):
+ generic_categories = [
+ {
+ "id": current_app.config["DEFAULT_TEMPLATE_CATEGORY_LOW"],
+ "name_en": "Low Category (Bulk)",
+ "name_fr": "Catégorie Basse (En Vrac)",
+ "sms_process_type": "low",
+ "email_process_type": "low",
+ "hidden": True,
+ },
+ {
+ "id": current_app.config["DEFAULT_TEMPLATE_CATEGORY_MEDIUM"],
+ "name_en": "Medium Category (Normal)",
+ "name_fr": "Catégorie Moyenne (Normale)",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": True,
+ },
+ {
+ "id": current_app.config["DEFAULT_TEMPLATE_CATEGORY_HIGH"],
+ "name_en": "High Category (Priority)",
+ "name_fr": "Catégorie Haute (Priorité)",
+ "sms_process_type": "high",
+ "email_process_type": "high",
+ "hidden": True,
+ },
+ ]
+ for category in generic_categories:
+ dao_create_template_category(TemplateCategory(**category))
+
+ yield
+
+
+@pytest.fixture(scope="function")
+def sample_template_category(
+ notify_db,
+ notify_db_session,
+ name_en="Category Name",
+ name_fr="Category Name (FR)",
+ description_en="Category Description",
+ description_fr="Category Description (FR)",
+ sms_process_type="normal",
+ email_process_type="normal",
+ hidden=False,
+):
+ return create_template_category(
+ notify_db,
+ notify_db_session,
+ name_en="Category Name",
+ name_fr="Category Name (FR)",
+ description_en="Category Description",
+ description_fr="Category Description (FR)",
+ sms_process_type="normal",
+ email_process_type="normal",
+ hidden=False,
+ )
+
+
+def create_template_category(
+ notify_db,
+ notify_db_session,
+ name_en="Category Name",
+ name_fr="Category Name (FR)",
+ description_en="Category Description",
+ description_fr="Category Description (FR)",
+ sms_process_type="normal",
+ email_process_type="normal",
+ hidden=False,
+):
+ data = {
+ "name_en": name_en,
+ "name_fr": name_fr,
+ "description_en": description_en,
+ "description_fr": description_fr,
+ "sms_process_type": sms_process_type,
+ "email_process_type": email_process_type,
+ "hidden": hidden,
+ }
+ template_category = TemplateCategory(**data)
+ dao_create_template_category(template_category)
+
+ return template_category
+
+
def create_sample_template(
notify_db,
notify_db_session,
@@ -241,6 +335,7 @@ def create_sample_template(
subject_line="Subject",
user=None,
service=None,
+ template_category=None,
created_by=None,
process_type="normal",
permissions=[EMAIL_TYPE, SMS_TYPE],
@@ -268,6 +363,11 @@ def create_sample_template(
data.update({"subject": subject_line})
if template_type == "letter":
data["postage"] = "second"
+ if template_category:
+ data["template_category"] = template_category
+ else:
+ cat = create_template_category(notify_db, notify_db_session, name_en=str(uuid.uuid4()), name_fr=str(uuid.uuid4()))
+ data.update({"template_category_id": cat.id})
template = Template(**data)
dao_create_template(template)
@@ -303,6 +403,42 @@ def sample_template(
service=None,
created_by=None,
process_type="normal",
+ template_category=None,
+ permissions=[EMAIL_TYPE, SMS_TYPE],
+ )
+
+
+@pytest.fixture(scope="function")
+def sample_template_with_priority_override(
+ notify_db,
+ notify_db_session,
+ sample_template_category,
+ template_name="Template Name",
+ template_type="sms",
+ content="This is a template:\nwith a newline",
+ archived=False,
+ hidden=False,
+ subject_line="Subject",
+ user=None,
+ service=None,
+ created_by=None,
+ process_type="priority",
+ permissions=[EMAIL_TYPE, SMS_TYPE],
+):
+ return create_sample_template(
+ notify_db,
+ notify_db_session,
+ template_name="Template Name",
+ template_type="sms",
+ content="This is a template:\nwith a newline",
+ archived=False,
+ hidden=False,
+ subject_line="Subject",
+ user=None,
+ service=None,
+ created_by=None,
+ process_type="priority",
+ template_category=sample_template_category,
permissions=[EMAIL_TYPE, SMS_TYPE],
)
diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py
index 6acce65192..a2d73afdd9 100644
--- a/tests/app/dao/test_provider_details_dao.py
+++ b/tests/app/dao/test_provider_details_dao.py
@@ -29,7 +29,7 @@ def test_can_get_sms_non_international_providers(restore_provider_details):
def test_can_get_sms_international_providers(restore_provider_details):
sms_providers = get_provider_details_by_notification_type("sms", True)
- assert len(sms_providers) == 1
+ assert len(sms_providers) == 3
assert all("sms" == prov.notification_type for prov in sms_providers)
assert all(prov.supports_international for prov in sms_providers)
@@ -291,7 +291,7 @@ def test_dao_get_provider_stats(notify_db_session):
assert result[1].identifier == "sns"
assert result[1].display_name == "AWS SNS"
- assert result[1].supports_international is False
+ assert result[1].supports_international is True
assert result[1].active is True
assert result[1].current_month_billable_sms == 4
@@ -312,6 +312,6 @@ def test_dao_get_provider_stats(notify_db_session):
assert result[5].identifier == "pinpoint"
assert result[5].notification_type == "sms"
- assert result[5].supports_international is False
+ assert result[5].supports_international is True
assert result[5].active is True
assert result[5].current_month_billable_sms == 0
diff --git a/tests/app/dao/test_template_categories_dao.py b/tests/app/dao/test_template_categories_dao.py
new file mode 100644
index 0000000000..3ae33d1561
--- /dev/null
+++ b/tests/app/dao/test_template_categories_dao.py
@@ -0,0 +1,393 @@
+import pytest
+from flask import current_app
+
+from app.dao.template_categories_dao import (
+ dao_create_template_category,
+ dao_delete_template_category_by_id,
+ dao_get_all_template_categories,
+ dao_get_template_category_by_id,
+ dao_get_template_category_by_template_id,
+ dao_update_template_category,
+)
+from app.dao.templates_dao import dao_create_template
+from app.errors import InvalidRequest
+from app.models import BULK, NORMAL, Template, TemplateCategory
+from tests.app.conftest import create_sample_template
+
+
+class TestCreateTemplateCategory:
+ def test_create_template_category(self, notify_db_session):
+ data = {
+ "name_en": "english",
+ "name_fr": "french",
+ "description_en": "english description",
+ "description_fr": "french description",
+ "sms_process_type": NORMAL,
+ "email_process_type": NORMAL,
+ "hidden": False,
+ "sms_sending_vehicle": "short_code",
+ }
+
+ template_category = TemplateCategory(**data)
+ dao_create_template_category(template_category)
+
+ temp_cat = dao_get_all_template_categories()
+ assert TemplateCategory.query.count() == 1
+ assert len(temp_cat) == 1
+ assert temp_cat[0].sms_sending_vehicle == "short_code"
+
+ def test_create_template_category_with_no_sms_sending_vehicle(self, notify_db_session):
+ data = {
+ "name_en": "english",
+ "name_fr": "french",
+ "description_en": "english description",
+ "description_fr": "french description",
+ "sms_process_type": NORMAL,
+ "email_process_type": NORMAL,
+ "hidden": False,
+ }
+
+ template_category = TemplateCategory(**data)
+ dao_create_template_category(template_category)
+
+ temp_cat = dao_get_all_template_categories()
+ assert TemplateCategory.query.count() == 1
+ assert len(temp_cat) == 1
+ assert temp_cat[0].sms_sending_vehicle == "long_code" # default value
+
+
+@pytest.mark.parametrize(
+ "category, updated_category",
+ [
+ (
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "description_en": "english description",
+ "description_fr": "french description",
+ "sms_process_type": NORMAL,
+ "email_process_type": NORMAL,
+ "hidden": False,
+ },
+ {
+ "name_en": "new english",
+ "name_fr": "new french",
+ "description_en": "new english description",
+ "description_fr": "new french description",
+ "sms_process_type": BULK,
+ "email_process_type": BULK,
+ "hidden": True,
+ },
+ )
+ ],
+)
+def test_update_template_category(notify_db_session, category, updated_category):
+ template_category = TemplateCategory(**category)
+ dao_create_template_category(template_category)
+
+ for key, value in updated_category.items():
+ setattr(template_category, key, value)
+
+ dao_update_template_category(template_category)
+
+ fetched_category = dao_get_all_template_categories()[0]
+
+ assert fetched_category.id == template_category.id
+ for key, value in updated_category.items():
+ assert getattr(fetched_category, key) == value
+
+
+@pytest.mark.parametrize(
+ "category, template",
+ [
+ (
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "description_en": "english description",
+ "description_fr": "french description",
+ "sms_process_type": NORMAL,
+ "email_process_type": NORMAL,
+ "hidden": False,
+ },
+ {
+ "name": "Sample Template",
+ "template_type": "email",
+ "content": "Template content",
+ },
+ )
+ ],
+)
+def test_dao_get_template_category_by_template_id(category, template, notify_db_session, sample_service, sample_user):
+ template_category = TemplateCategory(**category)
+ dao_create_template_category(template_category)
+
+ template = Template(**template)
+ template.service = sample_service
+ template.created_by = sample_user
+ template.template_category = template_category
+ dao_create_template(template)
+
+ assert dao_get_template_category_by_template_id(template.id) == template_category
+
+
+def test_get_template_category_by_id(notify_db_session):
+ data = {
+ "name_en": "english",
+ "name_fr": "french",
+ "description_en": "english description",
+ "description_fr": "french description",
+ "sms_process_type": NORMAL,
+ "email_process_type": NORMAL,
+ "hidden": False,
+ }
+
+ template_category = TemplateCategory(**data)
+ dao_create_template_category(template_category)
+
+ assert dao_get_template_category_by_id(template_category.id) == template_category
+
+
+@pytest.mark.parametrize(
+ "template_type, hidden, expected_count, categories_to_insert",
+ [
+ (
+ None,
+ None,
+ 2,
+ [
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": False,
+ },
+ {
+ "name_en": "english2",
+ "name_fr": "french2",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": False,
+ },
+ ],
+ ),
+ # Filter by template type SMS
+ (
+ "sms",
+ None,
+ 2,
+ [
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": False,
+ },
+ {
+ "name_en": "english2",
+ "name_fr": "french2",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": False,
+ },
+ ],
+ ),
+ # Filter by template type email
+ (
+ "email",
+ None,
+ 2,
+ [
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": False,
+ },
+ {
+ "name_en": "english2",
+ "name_fr": "french2",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": False,
+ },
+ ],
+ ),
+ # Filter by hidden False
+ (
+ None,
+ False,
+ 1,
+ [
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": False,
+ },
+ {
+ "name_en": "english2",
+ "name_fr": "french2",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": True,
+ },
+ ],
+ ),
+ # Filter by hidden True
+ (
+ None,
+ True,
+ 1,
+ [
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": False,
+ },
+ {
+ "name_en": "english2",
+ "name_fr": "french2",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": True,
+ },
+ ],
+ ),
+ # Filter by template type SMS and hidden False
+ (
+ "sms",
+ False,
+ 1,
+ [
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": False,
+ },
+ {
+ "name_en": "english2",
+ "name_fr": "french2",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": True,
+ },
+ ],
+ ),
+ (
+ "sms",
+ False,
+ 0,
+ [
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": True,
+ },
+ {
+ "name_en": "english2",
+ "name_fr": "french2",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": True,
+ },
+ ],
+ ),
+ # Filter by template type email and hidden True
+ (
+ "email",
+ True,
+ 1,
+ [
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": False,
+ },
+ {
+ "name_en": "english2",
+ "name_fr": "french2",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": True,
+ },
+ ],
+ ),
+ (
+ "email",
+ True,
+ 0,
+ [
+ {
+ "name_en": "english",
+ "name_fr": "french",
+ "sms_process_type": "normal",
+ "email_process_type": "normal",
+ "hidden": False,
+ },
+ {
+ "name_en": "english2",
+ "name_fr": "french2",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": False,
+ },
+ ],
+ ),
+ ],
+)
+def test_get_all_template_categories_with_filters(
+ template_type, hidden, expected_count, categories_to_insert, notify_db, notify_db_session
+):
+ for category_data in categories_to_insert:
+ template_category = TemplateCategory(**category_data)
+ dao_create_template_category(template_category)
+
+ create_sample_template(notify_db, notify_db_session, template_type="email", template_category=template_category)
+ create_sample_template(notify_db, notify_db_session, template_type="sms", template_category=template_category)
+
+ retrieved_categories = dao_get_all_template_categories(template_type=template_type, hidden=hidden)
+
+ assert len(retrieved_categories) == expected_count
+
+
+def test_dao_delete_template_category_by_id_should_delete_category_when_no_associated_templates(
+ notify_db_session, sample_template_category
+):
+ dao_delete_template_category_by_id(sample_template_category.id)
+
+ assert TemplateCategory.query.count() == 0
+
+
+def test_dao_delete_template_category_by_id_should_not_allow_deletion_when_associated_with_template(
+ notify_db, notify_db_session, sample_template_category
+):
+ create_sample_template(notify_db, notify_db_session, template_category=sample_template_category)
+
+ with pytest.raises(InvalidRequest):
+ dao_delete_template_category_by_id(sample_template_category.id)
+
+ assert TemplateCategory.query.count() == 1
+
+
+def test_dao_delete_template_category_by_id_should_allow_deletion_with_cascade_when_associated_with_template(
+ notify_db, notify_db_session, sample_template_category, populate_generic_categories
+):
+ template = create_sample_template(notify_db, notify_db_session, template_category=sample_template_category)
+
+ dao_delete_template_category_by_id(sample_template_category.id, cascade=True)
+ # 3 here because we have 3 generic default categories that will remain post-delete
+ assert TemplateCategory.query.count() == 3
+ assert str(template.template_category_id) == current_app.config["DEFAULT_TEMPLATE_CATEGORY_MEDIUM"]
diff --git a/tests/app/dao/test_templates_dao.py b/tests/app/dao/test_templates_dao.py
index ce6c07bdc7..516ceb6220 100644
--- a/tests/app/dao/test_templates_dao.py
+++ b/tests/app/dao/test_templates_dao.py
@@ -16,6 +16,7 @@
dao_get_template_versions,
dao_redact_template,
dao_update_template,
+ dao_update_template_category,
dao_update_template_reply_to,
)
from app.models import Template, TemplateHistory, TemplateRedacted
@@ -490,3 +491,16 @@ def test_template_postage_constraint_on_update(sample_service, sample_user):
created.postage = "third"
with pytest.raises(expected_exception=SQLAlchemyError):
dao_update_template(created)
+
+
+def test_dao_update_template_category(sample_template, sample_template_category):
+ dao_update_template_category(sample_template.id, sample_template_category.id)
+
+ updated_template = Template.query.get(sample_template.id)
+ assert updated_template.template_category_id == sample_template_category.id
+ assert updated_template.updated_at is not None
+ assert updated_template.version == 2
+
+ history = TemplateHistory.query.filter_by(id=sample_template.id, version=updated_template.version).one()
+ assert not history.template_category_id
+ assert history.updated_at == updated_template.updated_at
diff --git a/tests/app/db.py b/tests/app/db.py
index c9ff33427c..f06478ab54 100644
--- a/tests/app/db.py
+++ b/tests/app/db.py
@@ -188,6 +188,7 @@ def create_template(
hidden=False,
archived=False,
folder=None,
+ template_category=None,
postage=None,
process_type="normal",
):
@@ -200,6 +201,7 @@ def create_template(
"reply_to": reply_to,
"hidden": hidden,
"folder": folder,
+ "template_category": template_category,
"process_type": process_type,
}
if template_type == LETTER_TYPE:
diff --git a/tests/app/delivery/test_send_to_providers.py b/tests/app/delivery/test_send_to_providers.py
index 0768d98cc9..56b0eef3de 100644
--- a/tests/app/delivery/test_send_to_providers.py
+++ b/tests/app/delivery/test_send_to_providers.py
@@ -110,6 +110,40 @@ def test_should_use_sns_for_sms_if_sending_to_the_US(self, restore_provider_deta
provider = send_to_providers.provider_to_use("sms", "1234", "+17065551234")
assert provider.name == "sns"
+ @pytest.mark.serial
+ def test_should_use_pinpoint_for_sms_if_sending_outside_zone_1(self, restore_provider_details, notify_api):
+ with set_config_values(
+ notify_api,
+ {
+ "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id",
+ "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
+ },
+ ):
+ provider = send_to_providers.provider_to_use("sms", "1234", "+447512501324", international=True)
+ assert provider.name == "pinpoint"
+
+ def test_should_use_sns_for_sms_if_sending_to_non_CA_zone_1(self, restore_provider_details, notify_api):
+ with set_config_values(
+ notify_api,
+ {
+ "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id",
+ "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
+ },
+ ):
+ provider = send_to_providers.provider_to_use("sms", "1234", "+16715550123")
+ assert provider.name == "sns"
+
+ def test_should_use_sns_for_sms_if_match_fails(self, restore_provider_details, notify_api):
+ with set_config_values(
+ notify_api,
+ {
+ "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id",
+ "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
+ },
+ ):
+ provider = send_to_providers.provider_to_use("sms", "1234", "8695550123") # This number fails our matching code
+ assert provider.name == "sns"
+
@pytest.mark.parametrize("sc_pool_id, default_pool_id", [("", "default_pool_id"), ("sc_pool_id", "")])
def test_should_use_sns_if_pinpoint_not_configured(self, restore_provider_details, notify_api, sc_pool_id, default_pool_id):
with set_config_values(
@@ -156,6 +190,31 @@ def test_should_return_highest_priority_active_provider(restore_provider_details
assert send_to_providers.provider_to_use("sms", "1234").name == first.identifier
+def test_should_handle_opted_out_phone_numbers_if_using_pinpoint(notify_api, sample_template, mocker):
+ mocker.patch("app.aws_pinpoint_client.send_sms", return_value="opted_out")
+ db_notification = save_notification(
+ create_notification(
+ template=sample_template,
+ to_field="+16135551234",
+ status="created",
+ reply_to_text=sample_template.service.get_default_sms_sender(),
+ )
+ )
+
+ with set_config_values(
+ notify_api,
+ {
+ "AWS_PINPOINT_SC_POOL_ID": "sc_pool_id",
+ "AWS_PINPOINT_DEFAULT_POOL_ID": "default_pool_id",
+ },
+ ):
+ send_to_providers.send_sms_to_provider(db_notification)
+
+ notification = Notification.query.filter_by(id=db_notification.id).one()
+ assert notification.status == "permanent-failure"
+ assert notification.provider_response == "Phone number is opted out"
+
+
def test_should_send_personalised_template_to_correct_sms_provider_and_persist(sample_sms_template_with_html, mocker):
db_notification = save_notification(
create_notification(
@@ -178,6 +237,8 @@ def test_should_send_personalised_template_to_correct_sms_provider_and_persist(s
reference=str(db_notification.id),
sender=current_app.config["FROM_NUMBER"],
template_id=sample_sms_template_with_html.id,
+ service_id=sample_sms_template_with_html.service_id,
+ sending_vehicle=None,
)
notification = Notification.query.filter_by(id=db_notification.id).one()
@@ -397,6 +458,8 @@ def test_send_sms_should_use_template_version_from_notification_not_latest(sampl
reference=str(db_notification.id),
sender=current_app.config["FROM_NUMBER"],
template_id=sample_template.id,
+ service_id=sample_template.service_id,
+ sending_vehicle=ANY,
)
persisted_notification = notifications_dao.get_notification_by_id(db_notification.id)
@@ -475,7 +538,9 @@ def test_should_send_sms_with_downgraded_content(notify_db_session, mocker):
send_to_providers.send_sms_to_provider(db_notification)
- aws_sns_client.send_sms.assert_called_once_with(to=ANY, content=gsm_message, reference=ANY, sender=ANY, template_id=ANY)
+ aws_sns_client.send_sms.assert_called_once_with(
+ to=ANY, content=gsm_message, reference=ANY, sender=ANY, template_id=ANY, service_id=ANY, sending_vehicle=ANY
+ )
def test_send_sms_should_use_service_sms_sender(sample_service, sample_template, mocker):
@@ -489,7 +554,13 @@ def test_send_sms_should_use_service_sms_sender(sample_service, sample_template,
)
app.aws_sns_client.send_sms.assert_called_once_with(
- to=ANY, content=ANY, reference=ANY, sender=sms_sender.sms_sender, template_id=ANY
+ to=ANY,
+ content=ANY,
+ reference=ANY,
+ sender=sms_sender.sms_sender,
+ template_id=ANY,
+ service_id=ANY,
+ sending_vehicle=ANY,
)
@@ -862,6 +933,8 @@ def test_should_handle_sms_sender_and_prefix_message(
to=ANY,
reference=ANY,
template_id=ANY,
+ service_id=ANY,
+ sending_vehicle=ANY,
)
diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py
index 5ca28a1177..142c5aefa3 100644
--- a/tests/app/template/test_rest.py
+++ b/tests/app/template/test_rest.py
@@ -1558,3 +1558,70 @@ def test_should_template_be_redacted():
dao_update_organisation(some_org.id, organisation_type="province_or_territory")
assert should_template_be_redacted(some_org)
+
+
+def test_update_templates_category(sample_template, sample_template_category, admin_request):
+ admin_request.post(
+ "template.update_templates_category",
+ service_id=sample_template.service_id,
+ template_id=sample_template.id,
+ template_category_id=sample_template_category.id,
+ _expected_status=200,
+ )
+
+ template = dao_get_template_by_id(sample_template.id)
+
+ assert template.template_category.id == sample_template_category.id
+
+
+class TestTemplateCategory:
+ DEFAULT_TEMPLATE_CATEGORY_LOW = "0dda24c2-982a-4f44-9749-0e38b2607e89"
+ DEFAULT_TEMPLATE_CATEGORY_MEDIUM = "f75d6706-21b7-437e-b93a-2c0ab771e28e"
+
+ # ensure that the process_type is overridden when a user changes categories
+ @pytest.mark.parametrize(
+ "template_category_id, expected_process_type",
+ [
+ # category doesnt change, process_type should remain as priority
+ (
+ "unchanged",
+ "priority",
+ ),
+ # category changes, process_type should be removed
+ (
+ DEFAULT_TEMPLATE_CATEGORY_MEDIUM,
+ None,
+ ),
+ ],
+ )
+ def test_process_type_should_be_reset_when_template_category_updated(
+ self,
+ sample_service,
+ sample_template_with_priority_override,
+ sample_user,
+ admin_request,
+ populate_generic_categories,
+ template_category_id,
+ expected_process_type,
+ notify_api,
+ ):
+ with set_config_values(notify_api, {"FF_TEMPLATE_CATEGORY": "true"}): # TODO remove statement when FF removed
+ template_orig = dao_get_template_by_id(sample_template_with_priority_override.id)
+
+ calculated_tc = (
+ template_category_id if template_category_id != "unchanged" else str(template_orig.template_category_id)
+ )
+ admin_request.post(
+ "template.update_template",
+ service_id=sample_template_with_priority_override.service_id,
+ template_id=sample_template_with_priority_override.id,
+ _data={
+ "template_category_id": calculated_tc,
+ "redact_personalisation": False,
+ },
+ _expected_status=200,
+ )
+ template = dao_get_template_by_id(sample_template_with_priority_override.id)
+
+ assert str(template.template_category_id) == calculated_tc
+ assert template.process_type == expected_process_type
diff --git a/tests/app/template/test_template_category_rest.py b/tests/app/template/test_template_category_rest.py
new file mode 100644
index 0000000000..2669cc5978
--- /dev/null
+++ b/tests/app/template/test_template_category_rest.py
@@ -0,0 +1,149 @@
+import pytest
+from flask import url_for
+
+from tests import create_authorization_header
+from tests.app.conftest import create_sample_template
+
+
+def test_should_create_new_template_category(client, notify_db, notify_db_session):
+ data = {
+ "name_en": "new english",
+ "name_fr": "new french",
+ "description_en": "new english description",
+ "description_fr": "new french description",
+ "sms_process_type": "bulk",
+ "email_process_type": "bulk",
+ "hidden": True,
+ }
+ auth_header = create_authorization_header()
+
+ response = client.post(
+ url_for("template_category.create_template_category"),
+ headers=[("Content-Type", "application/json"), auth_header],
+ json=data,
+ )
+
+ assert response.status_code == 201
+ assert response.json["template_category"]["name_en"] == "new english"
+ assert response.json["template_category"]["name_fr"] == "new french"
+ assert response.json["template_category"]["description_en"] == "new english description"
+ assert response.json["template_category"]["description_fr"] == "new french description"
+ assert response.json["template_category"]["sms_process_type"] == "bulk"
+ assert response.json["template_category"]["email_process_type"] == "bulk"
+ assert response.json["template_category"]["hidden"]
+
+
+def test_get_template_category_by_id(client, sample_template_category):
+ auth_header = create_authorization_header()
+ response = client.get(
+ url_for("template_category.get_template_category", template_category_id=sample_template_category.id),
+ headers=[("Content-Type", "application/json"), auth_header],
+ )
+
+ assert response.status_code == 200
+ assert response.json["template_category"]["name_en"] == sample_template_category.name_en
+ assert response.json["template_category"]["name_fr"] == sample_template_category.name_fr
+ assert response.json["template_category"]["description_en"] == sample_template_category.description_en
+ assert response.json["template_category"]["description_fr"] == sample_template_category.description_fr
+ assert response.json["template_category"]["sms_process_type"] == sample_template_category.sms_process_type
+ assert response.json["template_category"]["email_process_type"] == sample_template_category.email_process_type
+ assert response.json["template_category"]["hidden"] == sample_template_category.hidden
+
+
+def test_get_template_category_by_template_id(client, notify_db, notify_db_session, sample_template_category):
+ category = sample_template_category
+ template = create_sample_template(notify_db, notify_db_session, template_category=category)
+
+ auth_header = create_authorization_header()
+ endpoint = url_for("template_category.get_template_category_by_template_id", template_id=template.id)
+
+ response = client.get(
+ endpoint,
+ headers=[("Content-Type", "application/json"), auth_header],
+ )
+
+ assert response.status_code == 200
+ assert response.json["template_category"]["name_en"] == category.name_en
+ assert response.json["template_category"]["name_fr"] == category.name_fr
+ assert response.json["template_category"]["description_en"] == category.description_en
+ assert response.json["template_category"]["description_fr"] == category.description_fr
+ assert response.json["template_category"]["sms_process_type"] == category.sms_process_type
+ assert response.json["template_category"]["email_process_type"] == category.email_process_type
+ assert response.json["template_category"]["hidden"] == category.hidden
+
+
+@pytest.mark.parametrize(
+ "template_type, hidden, expected_status_code, expected_msg",
+ [
+ ("invalid_template_type", True, 400, "Invalid filter 'template_type', valid template_types: 'sms', 'email'"),
+ ("sms", "not_a_boolean", 200, None),
+ ("email", "True", 200, None),
+ ("email", "False", 200, None),
+ ("email", None, 200, None),
+ ("sms", "True", 200, None),
+ ("sms", "False", 200, None),
+ ("sms", None, 200, None),
+ (None, None, 200, None),
+ (None, "True", 200, None),
+ (None, "False", 200, None),
+ ],
+)
+def test_get_template_categories(
+ template_type,
+ hidden,
+ expected_status_code,
+ expected_msg,
+ sample_template_category,
+ client,
+ notify_db,
+ notify_db_session,
+ mocker,
+):
+ auth_header = create_authorization_header()
+
+ endpoint = url_for("template_category.get_template_categories", template_type=template_type, hidden=hidden)
+
+ mocker.patch("app.dao.template_categories_dao.dao_get_all_template_categories", return_value=[sample_template_category])
+
+ response = client.get(
+ endpoint,
+ headers=[("Content-Type", "application/json"), auth_header],
+ )
+
+ assert response.status_code == expected_status_code
+ if not expected_status_code == 200:
+ assert response.json["message"] == expected_msg
+
+
+@pytest.mark.parametrize(
+ "cascade, expected_status_code, expected_msg",
+ [
+ ("True", 204, ""),
+ ("False", 400, "Cannot delete categories associated with templates. Dissociate the category from templates first."),
+ ],
+)
+def test_delete_template_category_cascade(
+ cascade,
+ expected_status_code,
+ expected_msg,
+ client,
+ mocker,
+ sample_template_category_with_templates,
+ populate_generic_categories,
+):
+ auth_header = create_authorization_header()
+
+ endpoint = url_for(
+ "template_category.delete_template_category",
+ template_category_id=sample_template_category_with_templates.id,
+ cascade=cascade,
+ )
+
+ response = client.delete(
+ endpoint,
+ headers=[("Content-Type", "application/json"), auth_header],
+ )
+
+ assert response.status_code == expected_status_code
+ if expected_status_code == 400:
+ assert response.json["message"] == expected_msg
diff --git a/tests/app/test_model.py b/tests/app/test_model.py
index 1df0f58f2f..5e3df7387f 100644
--- a/tests/app/test_model.py
+++ b/tests/app/test_model.py
@@ -4,8 +4,10 @@
from app import signer_personalisation
from app.models import (
+ BULK,
EMAIL_TYPE,
MOBILE_TYPE,
+ NORMAL,
NOTIFICATION_CREATED,
NOTIFICATION_DELIVERED,
NOTIFICATION_FAILED,
@@ -16,10 +18,12 @@
NOTIFICATION_STATUS_TYPES_FAILED,
NOTIFICATION_TECHNICAL_FAILURE,
PRECOMPILED_TEMPLATE_NAME,
+ PRIORITY,
SMS_TYPE,
Notification,
ServiceSafelist,
)
+from tests.app.conftest import create_template_category
from tests.app.db import (
create_inbound_number,
create_letter_contact,
@@ -354,6 +358,34 @@ def test_template_folder_is_parent(sample_service):
assert not folders[1].is_parent_of(folders[0])
+@pytest.mark.parametrize(
+ "template_type, process_type, sms_process_type, email_process_type, expected_template_process_type",
+ [
+ (SMS_TYPE, None, NORMAL, BULK, NORMAL),
+ (EMAIL_TYPE, None, BULK, NORMAL, NORMAL),
+ (SMS_TYPE, BULK, PRIORITY, PRIORITY, BULK),
+ (EMAIL_TYPE, BULK, PRIORITY, PRIORITY, BULK),
+ ],
+)
+def test_template_process_type(
+ notify_db,
+ notify_db_session,
+ template_type,
+ process_type,
+ sms_process_type,
+ email_process_type,
+ expected_template_process_type,
+):
+ template_category = create_template_category(
+ notify_db, notify_db_session, sms_process_type=sms_process_type, email_process_type=email_process_type
+ )
+ template = create_template(
+ service=create_service(), template_type=template_type, process_type=process_type, template_category=template_category
+ )
+
+ assert template.template_process_type == expected_template_process_type
+
+
def test_fido2_key_serialization(sample_fido2_key):
json = sample_fido2_key.serialize()
assert json["name"] == sample_fido2_key.name
diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py
index bdffcf4cdb..45968e172a 100644
--- a/tests/app/user/test_rest.py
+++ b/tests/app/user/test_rest.py
@@ -980,6 +980,31 @@ def test_send_branding_request(client, sample_service, sample_organisation, mock
mocked_salesforce_client.engagement_update.assert_not_called()
+class TestFreshDeskRequestTickets:
+ def test_send_request_for_new_category(self, client, sample_service, sample_organisation, mocker):
+ sample_user = sample_service.users[0]
+ sample_service.organisation = sample_organisation
+ post_data = {
+ "service_name": sample_service.name,
+ "email_address": sample_user.email_address,
+ "service_id": str(sample_service.id),
+ "template_category_name_en": "test",
+ "template_category_name_fr": "test",
+ "template_id": "1234",
+ }
+ mocked_freshdesk = mocker.patch("app.user.rest.Freshdesk.send_ticket", return_value=201)
+ mocked_salesforce_client = mocker.patch("app.user.rest.salesforce_client")
+
+ resp = client.post(
+ url_for("user.send_new_template_category_request", user_id=str(sample_user.id)),
+ data=json.dumps(post_data),
+ headers=[("Content-Type", "application/json"), create_authorization_header()],
+ )
+ assert resp.status_code == 204
+ mocked_freshdesk.assert_called_once_with()
+ mocked_salesforce_client.engagement_update.assert_not_called()
+
+
def test_send_user_confirm_new_email_returns_204(client, sample_user, change_email_confirmation_template, mocker):
mocked = mocker.patch("app.celery.provider_tasks.deliver_email.apply_async")
new_email = "new_address@dig.gov.uk"
diff --git a/tests_cypress/package-lock.json b/tests_cypress/package-lock.json
index ef4c975b59..722e36d686 100644
--- a/tests_cypress/package-lock.json
+++ b/tests_cypress/package-lock.json
@@ -186,6 +186,40 @@
"strip-ansi": "^7.0.1"
}
},
+ "string-width-cjs": {
+ "version": "npm:string-width@4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "requires": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true
+ },
+ "emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.1"
+ }
+ }
+ }
+ },
"strip-ansi": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
@@ -195,6 +229,23 @@
"ansi-regex": "^6.0.1"
}
},
+ "strip-ansi-cjs": {
+ "version": "npm:strip-ansi@6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.1"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true
+ }
+ }
+ },
"wrap-ansi": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
@@ -205,6 +256,60 @@
"string-width": "^5.0.1",
"strip-ansi": "^7.0.1"
}
+ },
+ "wrap-ansi-cjs": {
+ "version": "npm:wrap-ansi@7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true
+ },
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "requires": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ }
+ },
+ "strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.1"
+ }
+ }
+ }
}
}
},
@@ -390,9 +495,9 @@
"dev": true
},
"axe-core": {
- "version": "4.7.2",
- "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.7.2.tgz",
- "integrity": "sha512-zIURGIS1E1Q4pcrMjp+nnEh+16G56eG/MUllJH8yEvw7asDo7Ac9uhC9KIH5jzpITueEZolfYglnCGIuSBz39g==",
+ "version": "4.8.4",
+ "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.8.4.tgz",
+ "integrity": "sha512-CZLSKisu/bhJ2awW4kJndluz2HLZYIHh5Uy1+ZwDRkJi69811xgIXXfdU9HSLX0Th+ILrHj8qfL/5wzamsFtQg==",
"dev": true
},
"balanced-match": {
@@ -666,9 +771,9 @@
}
},
"cypress-axe": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/cypress-axe/-/cypress-axe-1.4.0.tgz",
- "integrity": "sha512-Ut7NKfzjyKm0BEbt2WxuKtLkIXmx6FD2j0RwdvO/Ykl7GmB/qRQkwbKLk3VP35+83hiIr8GKD04PDdrTK5BnyA=="
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/cypress-axe/-/cypress-axe-1.5.0.tgz",
+ "integrity": "sha512-Hy/owCjfj+25KMsecvDgo4fC/781ccL+e8p+UUYoadGVM2ogZF9XIKbiM6KI8Y3cEaSreymdD6ZzccbI2bY0lQ=="
},
"cypress-html-validate": {
"version": "5.1.2",
@@ -2125,17 +2230,6 @@
"strip-ansi": "^6.0.1"
}
},
- "string-width-cjs": {
- "version": "npm:string-width@4.2.3",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "dev": true,
- "requires": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- }
- },
"string_decoder": {
"version": "0.10.31",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
@@ -2150,15 +2244,6 @@
"ansi-regex": "^5.0.1"
}
},
- "strip-ansi-cjs": {
- "version": "npm:strip-ansi@6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
- "requires": {
- "ansi-regex": "^5.0.1"
- }
- },
"strip-final-newline": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
@@ -2337,17 +2422,6 @@
"strip-ansi": "^6.0.0"
}
},
- "wrap-ansi-cjs": {
- "version": "npm:wrap-ansi@7.0.0",
- "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
- "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
- "dev": true,
- "requires": {
- "ansi-styles": "^4.0.0",
- "string-width": "^4.1.0",
- "strip-ansi": "^6.0.0"
- }
- },
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",