
Merge branch 'main' into feat/rework-callback-logging
jimleroyer authored Mar 12, 2024
2 parents 6fdcab8 + 00c44e3 commit 23aecfd
Showing 12 changed files with 168 additions and 125 deletions.
51 changes: 11 additions & 40 deletions .github/workflows/docker.yaml
@@ -9,6 +9,7 @@ env:
  DOCKER_ORG: public.ecr.aws/v6b8u5o6
  DOCKER_SLUG: public.ecr.aws/v6b8u5o6/notify-api
  KUBECTL_VERSION: '1.23.6'
+  WORKFLOW_PAT: ${{ secrets.WORKFLOW_GITHUB_PAT }}

permissions:
  id-token: write # This is required for requesting the OIDC JWT
@@ -26,21 +27,14 @@ jobs:
          unzip -q awscliv2.zip
          sudo ./aws/install --update
          aws --version
-      - name: Install kubectl
-        run: |
-          curl -LO https://storage.googleapis.com/kubernetes-release/release/v$KUBECTL_VERSION/bin/linux/amd64/kubectl
-          chmod +x ./kubectl
-          sudo mv ./kubectl /usr/local/bin/kubectl
-          kubectl version --client
-          mkdir -p $HOME/.kube
      - name: Configure credentials to CDS public ECR using OIDC
        uses: aws-actions/configure-aws-credentials@master
        with:
          role-to-assume: arn:aws:iam::283582579564:role/notification-api-apply
          role-session-name: NotifyApiGitHubActions
          aws-region: "us-east-1"

      - name: Login to ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@5a88a04c91d5c6f97aae0d9be790e64d9b1d47b7 # v1.7.1
@@ -56,43 +50,19 @@
            -t $DOCKER_SLUG:${GITHUB_SHA::7} \
            -t $DOCKER_SLUG:latest \
            -f ci/Dockerfile .
      - name: Publish
        run: |
          docker push $DOCKER_SLUG:latest && docker push $DOCKER_SLUG:${GITHUB_SHA::7}
-      - name: Configure credentials to Notify account using OIDC
-        uses: aws-actions/configure-aws-credentials@master
-        with:
-          role-to-assume: arn:aws:iam::239043911459:role/notification-api-apply
-          role-session-name: NotifyApiGitHubActions
-          aws-region: "ca-central-1"
-
-      - name: Get Kubernetes configuration
-        run: |
-          aws eks --region $AWS_REGION update-kubeconfig --name notification-canada-ca-staging-eks-cluster --kubeconfig $HOME/.kube/config
-      - name: Update images in staging
+      - name: Rollout in Kubernetes
        run: |
-          kubectl set image deployment.apps/api api=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
-          kubectl set image deployment.apps/celery-beat celery-beat=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
-          kubectl set image deployment.apps/celery-sms celery-sms=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
-          kubectl set image deployment.apps/celery-primary celery-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
-          kubectl set image deployment.apps/celery-scalable celery-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
-          kubectl set image deployment.apps/celery-sms-send-primary celery-sms-send-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
-          kubectl set image deployment.apps/celery-sms-send-scalable celery-sms-send-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
-          kubectl set image deployment.apps/celery-email-send-primary celery-email-send-primary=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
-          kubectl set image deployment.apps/celery-email-send-scalable celery-email-send-scalable=$DOCKER_SLUG:${GITHUB_SHA::7} -n=notification-canada-ca --kubeconfig=$HOME/.kube/config
-      - name: Restart deployments in staging
-        run: |
-          kubectl rollout restart deployment/api -n notification-canada-ca
-          kubectl rollout restart deployment/celery-beat -n notification-canada-ca
-          kubectl rollout restart deployment/celery-sms -n notification-canada-ca
-          kubectl rollout restart deployment/celery-primary -n notification-canada-ca
-          kubectl rollout restart deployment/celery-scalable -n notification-canada-ca
-          kubectl rollout restart deployment/celery-sms-send-primary -n notification-canada-ca
-          kubectl rollout restart deployment/celery-sms-send-scalable -n notification-canada-ca
-          kubectl rollout restart deployment/celery-email-send-primary -n notification-canada-ca
-          kubectl rollout restart deployment/celery-email-send-scalable -n notification-canada-ca
+          PAYLOAD={\"ref\":\"main\",\"inputs\":{\"docker_sha\":\"${GITHUB_SHA::7}\"}}
+          curl -L -X POST -H "Accept: application/vnd.github+json" \
+            -H "Authorization: Bearer $WORKFLOW_PAT" \
+            -H "X-GitHub-Api-Version: 2022-11-28" \
+            https://api.github.com/repos/cds-snc/notification-manifests/actions/workflows/api-rollout-k8s-staging.yaml/dispatches \
+            -d $PAYLOAD
      - name: my-app-install token
        id: notify-pr-bot
@@ -118,3 +88,4 @@ jobs:
        run: |
          json="{'text':'<!here> CI is failing in <https://github.com/cds-snc/notification-api/actions/runs/${GITHUB_RUN_ID}|notification-api> !'}"
          curl -X POST -H 'Content-type: application/json' --data "$json" ${{ secrets.SLACK_WEBHOOK }}
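
The deploy job no longer talks to the staging cluster directly: instead of running kubectl set image and kubectl rollout restart itself, it dispatches the api-rollout-k8s-staging.yaml workflow in cds-snc/notification-manifests. A minimal Python sketch of the same dispatch call, mirroring the curl flags above; it assumes the requests library and that WORKFLOW_PAT is a token allowed to dispatch workflows on the manifests repo:

# Sketch (not part of the commit): the dispatch call above, via requests.
import os

import requests

resp = requests.post(
    "https://api.github.com/repos/cds-snc/notification-manifests"
    "/actions/workflows/api-rollout-k8s-staging.yaml/dispatches",
    headers={
        "Accept": "application/vnd.github+json",
        "Authorization": f"Bearer {os.environ['WORKFLOW_PAT']}",
        "X-GitHub-Api-Version": "2022-11-28",
    },
    json={"ref": "main", "inputs": {"docker_sha": os.environ["GITHUB_SHA"][:7]}},
)
resp.raise_for_status()  # a successful dispatch returns 204 No Content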
14 changes: 9 additions & 5 deletions app/celery/nightly_tasks.py
@@ -49,11 +49,15 @@ def remove_letter_csv_files():


def _remove_csv_files(job_types):
-    jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types)
-    for job in jobs:
-        s3.remove_job_from_s3(job.service_id, job.id)
-        dao_archive_job(job)
-        current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))
+    while True:
+        jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types, limit=20000)
+        if len(jobs) == 0:
+            break
+        current_app.logger.info("Archiving {} jobs.".format(len(jobs)))
+        for job in jobs:
+            s3.remove_job_from_s3(job.service_id, job.id)
+            dao_archive_job(job)
+            current_app.logger.info("Job ID {} has been removed from s3.".format(job.id))


@notify_celery.task(name="delete-sms-notifications")
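
The task now drains expired jobs in fixed-size batches instead of loading them all at once; each pass archives the jobs it fetched, which (assuming the DAO excludes archived jobs) removes them from the next query. A stripped-down sketch of the pattern, with fetch_batch and process standing in for the DAO and S3/archive calls:

# Sketch of the drain-in-batches pattern used above.
BATCH_SIZE = 20000


def drain_in_batches(fetch_batch, process):
    while True:
        batch = fetch_batch(limit=BATCH_SIZE)
        if not batch:
            break  # nothing left past the retention window
        for item in batch:
            # process() must stop the item matching the query,
            # otherwise the loop would refetch the same rows forever
            process(item)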
15 changes: 9 additions & 6 deletions app/dao/jobs_dao.py
@@ -129,16 +129,15 @@ def dao_update_job(job):
    db.session.commit()


-def dao_get_jobs_older_than_data_retention(notification_types):
+def dao_get_jobs_older_than_data_retention(notification_types, limit=None):
    flexible_data_retention = ServiceDataRetention.query.filter(
        ServiceDataRetention.notification_type.in_(notification_types)
    ).all()
    jobs = []
    today = datetime.utcnow().date()
    for f in flexible_data_retention:
        end_date = today - timedelta(days=f.days_of_retention)
-
-        jobs.extend(
+        query = (
            Job.query.join(Template)
            .filter(
                func.coalesce(Job.scheduled_for, Job.created_at) < end_date,
@@ -147,13 +146,15 @@ def dao_get_jobs_older_than_data_retention(notification_types):
                Job.service_id == f.service_id,
            )
            .order_by(desc(Job.created_at))
-            .all()
        )
+        if limit:
+            query = query.limit(limit)
+        jobs.extend(query.all())

    end_date = today - timedelta(days=7)
    for notification_type in notification_types:
        services_with_data_retention = [x.service_id for x in flexible_data_retention if x.notification_type == notification_type]
-        jobs.extend(
+        query = (
            Job.query.join(Template)
            .filter(
                func.coalesce(Job.scheduled_for, Job.created_at) < end_date,
@@ -162,8 +163,10 @@
                Job.service_id.notin_(services_with_data_retention),
            )
            .order_by(desc(Job.created_at))
-            .all()
        )
+        if limit:
+            query = query.limit(limit - len(jobs))
+        jobs.extend(query.all())

    return jobs

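
Note how the cap is threaded through both phases: each per-service retention query is limited to limit on its own, and the trailing seven-day default query receives only the remainder, limit - len(jobs). A usage sketch, with the type strings standing in for the repo's notification-type constants:

# Usage sketch: bound the batch of expired jobs fetched per pass.
jobs = dao_get_jobs_older_than_data_retention(notification_types=["email", "sms"], limit=20000)

# Omitting limit (limit=None) keeps the previous unbounded behaviour.
all_jobs = dao_get_jobs_older_than_data_retention(notification_types=["email", "sms"])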
13 changes: 12 additions & 1 deletion app/dao/organisation_dao.py
@@ -2,7 +2,14 @@

from app import db
from app.dao.dao_utils import transactional, version_class
-from app.models import Domain, InvitedOrganisationUser, Organisation, Service, User
+from app.models import (
+    Domain,
+    EmailBranding,
+    InvitedOrganisationUser,
+    Organisation,
+    Service,
+    User,
+)


def dao_get_organisations():
@@ -55,6 +62,10 @@ def dao_update_organisation(organisation_id, **kwargs):
    domains = kwargs.pop("domains", None)

    num_updated = Organisation.query.filter_by(id=organisation_id).update(kwargs)
+    if "email_branding_id" in kwargs:
+        email_brand = EmailBranding.query.filter_by(id=kwargs["email_branding_id"]).one()
+        org = Organisation.query.get(organisation_id)
+        org.email_branding = email_brand

    if isinstance(domains, list):
        Domain.query.filter_by(organisation_id=organisation_id).delete()
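
Because the join is now driven by email_branding.organisation_id (see the model and migration changes below), writing organisation.email_branding_id via the bulk update() no longer links the rows on its own, so the DAO assigns the relationship explicitly. A sketch of the effect, with placeholder ids:

# Sketch: linking a brand to an organisation under the new schema.
# org_id and branding_id are placeholders for illustration.
org = Organisation.query.get(org_id)
email_brand = EmailBranding.query.filter_by(id=branding_id).one()  # raises if no such brand

org.email_branding = email_brand  # persisted as email_brand.organisation_id = org.id
db.session.commit()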
21 changes: 14 additions & 7 deletions app/job/rest.py
@@ -168,27 +168,34 @@ def create_job(service_id):

    if template.template_type == SMS_TYPE:
        # calculate the number of simulated recipients
-        numberOfSimulated = sum(
-            simulated_recipient(i["phone_number"].data, template.template_type) for i in list(recipient_csv.get_rows())
-        )
-        mixedRecipients = numberOfSimulated > 0 and numberOfSimulated != len(list(recipient_csv.get_rows()))
+        numberOfSimulated = sum(simulated_recipient(i["phone_number"].data, template.template_type) for i in recipient_csv.rows)
+        mixedRecipients = numberOfSimulated > 0 and numberOfSimulated != len(recipient_csv)

        # if they have specified testing and NON-testing recipients, raise an error
        if mixedRecipients:
            raise InvalidRequest(message="Bulk sending to testing and non-testing numbers is not supported", status_code=400)

-        is_test_notification = len(list(recipient_csv.get_rows())) == numberOfSimulated
+        is_test_notification = len(recipient_csv) == numberOfSimulated

        if not is_test_notification:
            check_sms_daily_limit(service, len(recipient_csv))
            increment_sms_daily_count_send_warnings_if_needed(service, len(recipient_csv))

    elif template.template_type == EMAIL_TYPE:
-        check_email_daily_limit(service, len(list(recipient_csv.get_rows())))
+        if "notification_count" in data:
+            notification_count = int(data["notification_count"])
+        else:
+            current_app.logger.warning(
+                f"notification_count not in metadata for job {data['id']}, using len(recipient_csv) instead."
+            )
+            notification_count = len(recipient_csv)
+
+        check_email_daily_limit(service, notification_count)

        scheduled_for = datetime.fromisoformat(data.get("scheduled_for")) if data.get("scheduled_for") else None

        if scheduled_for is None or not scheduled_for.date() > datetime.today().date():
-            increment_email_daily_count_send_warnings_if_needed(service, len(list(recipient_csv.get_rows())))
+            increment_email_daily_count_send_warnings_if_needed(service, notification_count)

        data.update({"template_version": template.version})

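
The SMS branch now derives its counts from recipient_csv.rows and len(recipient_csv) instead of materializing list(recipient_csv.get_rows()) three separate times, then classifies the upload: a mix of simulated and real numbers is rejected, and an all-simulated upload is treated as a test send that skips the daily limits. A condensed sketch of that classification, with example counts standing in for the values computed above:

# Sketch: how an SMS upload is classified by its simulated-recipient count.
# total and simulated stand in for len(recipient_csv) and numberOfSimulated.
def classify(total: int, simulated: int) -> str:
    if simulated > 0 and simulated != total:
        return "reject"  # mixed real and test numbers: InvalidRequest, 400
    if simulated == total:
        return "test"    # all simulated: skip the daily-limit checks
    return "live"        # no simulated numbers: enforce daily limits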
8 changes: 6 additions & 2 deletions app/models.py
@@ -276,6 +276,10 @@ class EmailBranding(BaseModel):
        nullable=False,
        default=BRANDING_ORG_NEW,
    )
+    organisation_id = db.Column(
+        UUID(as_uuid=True), db.ForeignKey("organisation.id", ondelete="SET NULL"), index=True, nullable=True
+    )
+    organisation = db.relationship("Organisation", back_populates="email_branding", foreign_keys=[organisation_id])

    def serialize(self) -> dict:
        serialized = {
@@ -285,6 +289,7 @@ def serialize(self) -> dict:
"name": self.name,
"text": self.text,
"brand_type": self.brand_type,
"organisation_id": str(self.organisation_id) if self.organisation_id else "",
}

return serialized
@@ -449,10 +454,9 @@ class Organisation(BaseModel):
"Domain",
)

email_branding = db.relationship("EmailBranding")
email_branding = db.relationship("EmailBranding", uselist=False)
email_branding_id = db.Column(
UUID(as_uuid=True),
db.ForeignKey("email_branding.id"),
nullable=True,
)

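
The foreign key moves to email_branding.organisation_id, organisation.email_branding_id loses its ForeignKey, and uselist=False keeps the organisation side scalar, so an organisation still exposes a single branding object rather than a list. A sketch of the resulting access patterns, with placeholder ids:

# Sketch: reading the relationship from either side under the new mapping.
org = Organisation.query.get(org_id)            # org_id is a placeholder
branding = org.email_branding                   # one EmailBranding or None, not a list

brand = EmailBranding.query.get(branding_id)    # branding_id is a placeholder
brand.serialize()["organisation_id"]            # "" when no organisation is linked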
46 changes: 46 additions & 0 deletions migrations/versions/0445_add_org_id_branding.py
@@ -0,0 +1,46 @@
"""
Revision ID: 0445_add_org_id_branding
Revises: 0444_add_index_n_history2.py
Create Date: 2024-02-27
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

revision = "0445_add_org_id_branding"
down_revision = "0444_add_index_n_history2"


def upgrade():
op.add_column(
"email_branding",
sa.Column("organisation_id", postgresql.UUID(as_uuid=True), nullable=True),
)
op.create_index(
op.f("ix_email_branding_organisation_id"),
"email_branding",
["organisation_id"],
unique=False,
)
op.create_foreign_key(
"fk_email_branding_organisation",
"email_branding",
"organisation",
["organisation_id"],
["id"],
ondelete="SET NULL",
)
op.drop_constraint("fk_organisation_email_branding_id", "organisation", type_="foreignkey")


def downgrade():
op.drop_index(op.f("ix_email_branding_organisation_id"), table_name="email_branding")
op.drop_constraint("fk_email_branding_organisation", "email_branding", type_="foreignkey")
op.drop_column("email_branding", "organisation_id")
op.create_foreign_key(
"fk_organisation_email_branding_id",
"organisation",
"email_branding",
["email_branding_id"],
["id"],
)
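
Since the new column is created with ondelete="SET NULL", deleting an organisation detaches its branding rows rather than blocking the delete or cascading it. A sketch of the behaviour, with session objects assumed:

# Sketch: effect of ondelete="SET NULL" on linked branding rows.
org = Organisation.query.get(org_id)  # org_id is a placeholder
db.session.delete(org)
db.session.commit()
# email_branding rows that pointed at org now have organisation_id = NULL;
# the branding records themselves are preserved.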
22 changes: 11 additions & 11 deletions poetry.lock

Some generated files are not rendered by default.
