Commit de7d5b9
Merge branch 'main' into feat/scaffold-ui-test-user-on-demand
andrewleith authored Sep 13, 2024
2 parents 7352db5 + 5764ae6 commit de7d5b9
Showing 105 changed files with 4,062 additions and 2,231 deletions.
10 changes: 3 additions & 7 deletions .devcontainer/devcontainer.json
@@ -13,14 +13,10 @@
"[python]": {
"editor.formatOnSave": true
},
"python.formatting.blackPath": "/usr/local/bin/black",
"python.linting.enabled": true,
"python.linting.pylintEnabled": true,
"python.linting.pylintPath": "/usr/local/bin/pylint",
"python.pythonPath": "/usr/local/bin/python"
},
"extensions": [
"bungcip.better-toml",
"tamasfe.even-better-toml",
"donjayamanne.python-extension-pack",
"eamodio.gitlens",
"GitHub.copilot",
@@ -41,7 +37,8 @@
"visualstudioexptteam.vscodeintellicode",
"wenfangdu.jump",
"wholroyd.jinja",
"yzhang.markdown-all-in-one"
"yzhang.markdown-all-in-one",
"charliermarsh.ruff"
]
}
},
@@ -61,5 +58,4 @@
},
"postCreateCommand": "notify-dev-entrypoint.sh",
"remoteUser": "vscode",

}
2 changes: 1 addition & 1 deletion .devcontainer/docker-compose.yml
@@ -38,7 +38,7 @@ services:
- "5432:5432"

redis:
image: redis:6.2@sha256:d4948d011cc38e94f0aafb8f9a60309bd93034e07d10e0767af534512cf012a9
image: redis:6.2@sha256:7919fdd5300e7abf7ae95919e6f144b37c55df16553302dbbcc7495a5aa0c079
restart: always
command: redis-server --port 6380
ports:
2 changes: 1 addition & 1 deletion .devcontainer/scripts/notify-dev-entrypoint.sh
@@ -35,7 +35,7 @@ cd /workspace
echo -e "fpath+=/.zfunc" >> ~/.zshrc
echo -e "autoload -Uz compinit && compinit"

pip install poetry==${POETRY_VERSION}
pip install poetry==${POETRY_VERSION} poetry-plugin-sort
export PATH=$PATH:/home/vscode/.local/bin/
which poetry
poetry --version
2 changes: 1 addition & 1 deletion .github/workflows/ossf-scorecard.yml
@@ -25,7 +25,7 @@ jobs:
persist-credentials: false

- name: "Run analysis"
uses: ossf/scorecard-action@8c9e2c1222f54716a1df7d7bbb245e2a045b4423
uses: ossf/scorecard-action@6c4912ed9e5f80cfda40164b92753f21f0892cab
with:
results_file: ossf-results.json
results_format: json
4 changes: 2 additions & 2 deletions .github/workflows/test.yaml
@@ -34,7 +34,7 @@ jobs:
- name: Install poetry
env:
POETRY_VERSION: "1.7.1"
run: pip install poetry==${POETRY_VERSION} && poetry --version
run: pip install poetry==${POETRY_VERSION} poetry-plugin-sort && poetry --version
- name: Check poetry.lock aligns with pyproject.toml
run: poetry check --lock
- name: Install requirements
@@ -67,7 +67,7 @@ jobs:
run: |
cp -f .env.example .env
- name: Checks for new endpoints against AWS WAF rules
uses: cds-snc/notification-utils/.github/actions/[email protected].2
uses: cds-snc/notification-utils/.github/actions/[email protected].9
with:
app-loc: '/github/workspace'
app-libs: '/github/workspace/env/site-packages'
2 changes: 2 additions & 0 deletions .gitignore
@@ -91,3 +91,5 @@ jinja_templates/
cypress.env.json
node_modules/
tests_cypress/cypress/videos/

.ruff_cache/
6 changes: 3 additions & 3 deletions Makefile
@@ -37,9 +37,9 @@ clean:

.PHONY: format
format:
poetry run isort .
poetry run black --config pyproject.toml .
poetry run flake8 .
ruff check --select I --fix .
ruff check
ruff format .
poetry run mypy .

.PHONY: smoke-test
17 changes: 14 additions & 3 deletions app/__init__.py
@@ -153,10 +153,16 @@ def create_app(application, config=None):
# Log the application configuration
application.logger.info(f"Notify config: {config.get_safe_config()}")

# avoid circular imports by importing this file later
from app.commands import setup_commands
# avoid circular imports by importing these files later
from app.commands.bulk_db import setup_bulk_db_commands
from app.commands.deprecated import setup_deprecated_commands
from app.commands.support import setup_support_commands
from app.commands.test_data import setup_test_data_commands

setup_commands(application)
setup_support_commands(application)
setup_bulk_db_commands(application)
setup_test_data_commands(application)
setup_deprecated_commands(application)

return application

@@ -176,10 +182,12 @@ def register_blueprint(application):
from app.authentication.auth import (
requires_admin_auth,
requires_auth,
requires_cache_clear_auth,
requires_no_auth,
requires_sre_auth,
)
from app.billing.rest import billing_blueprint
from app.cache.rest import cache_blueprint
from app.complaint.complaint_rest import complaint_blueprint
from app.email_branding.rest import email_branding_blueprint
from app.events.rest import events as events_blueprint
@@ -264,6 +272,9 @@ def register_blueprint(application):

register_notify_blueprint(application, cypress_blueprint, requires_admin_auth, "/cypress")

register_notify_blueprint(application, cache_blueprint, requires_cache_clear_auth)


def register_v2_blueprints(application):
from app.authentication.auth import requires_auth
from app.v2.inbound_sms.get_inbound_sms import (
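The single setup_commands helper is replaced by per-module registration functions (support, bulk DB, test data, deprecated). A minimal sketch of what one of these helpers could look like, assuming each app/commands/* module attaches click commands to the Flask CLI; the command name, option, and body below are hypothetical:

import click
from flask import Flask


def setup_support_commands(application: Flask) -> None:
    # Hypothetical example command; the real commands live in app/commands/support.py.
    @click.command("promote-user-to-platform-admin")
    @click.option("-u", "--user-email", "user_email", required=True)
    def promote_user(user_email: str) -> None:
        click.echo(f"Would promote {user_email} to platform admin")

    application.cli.add_command(promote_user)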
21 changes: 21 additions & 0 deletions app/authentication/auth.py
@@ -20,6 +20,7 @@

JWT_AUTH_TYPE = "jwt"
API_KEY_V1_AUTH_TYPE = "api_key_v1"
CACHE_CLEAR_V1_AUTH_TYPE = "cache_clear_v1"
AUTH_TYPES = [
(
"Bearer",
@@ -34,6 +35,11 @@
"the API secret generated for you by GC Notify. "
"Learn more: https://documentation.notification.canada.ca/en/start.html#headers.",
),
(
"CacheClear-v1",
CACHE_CLEAR_V1_AUTH_TYPE,
"This is used internally by GC Notify to clear the redis cache after a deployment.",
),
]


@@ -108,6 +114,21 @@ def requires_sre_auth():
raise AuthError("Unauthorized, sre authentication token required", 401)


def requires_cache_clear_auth():
request_helper.check_proxy_header_before_request()

auth_type, auth_token = get_auth_token(request)
if auth_type != JWT_AUTH_TYPE:
raise AuthError("Invalid scheme: can only use JWT for sre authentication", 401)
client = __get_token_issuer(auth_token)

if client == current_app.config.get("CACHE_CLEAR_USER_NAME"):
g.service_id = current_app.config.get("CACHE_CLEAR_USER_NAME")
return handle_admin_key(auth_token, current_app.config.get("CACHE_CLEAR_CLIENT_SECRET"))
else:
raise AuthError("Unauthorized, cache clear authentication token required", 401)


def requires_auth():
request_helper.check_proxy_header_before_request()

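requires_cache_clear_auth mirrors requires_sre_auth: the caller must present a JWT whose issuer is CACHE_CLEAR_USER_NAME, signed with CACHE_CLEAR_CLIENT_SECRET. A minimal client-side sketch of minting such a token, assuming the same HS256 JWT shape (iss and iat claims) that handle_admin_key validates; the helper name is illustrative:

import time

import jwt  # PyJWT


def make_cache_clear_token(client_id: str, secret: str) -> str:
    # Assumed claim shape: iss identifies the cache-clear client, iat is the issue time.
    claims = {"iss": client_id, "iat": int(time.time())}
    return jwt.encode(claims, secret, algorithm="HS256")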
Empty file added app/aws/xray/__init__.py
Empty file.
109 changes: 109 additions & 0 deletions app/aws/xray/context.py
@@ -0,0 +1,109 @@
import logging

from aws_xray_sdk.core.context import Context
from aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException

log = logging.getLogger(__name__)

MISSING_SEGMENT_MSG = "cannot find the current segment/subsegment, please make sure you have a segment open"
SUPPORTED_CONTEXT_MISSING = ("RUNTIME_ERROR", "LOG_ERROR", "LOG_WARNING", "IGNORE_ERROR")
CXT_MISSING_STRATEGY_KEY = "AWS_XRAY_CONTEXT_MISSING"


class NotifyContext(Context):
"""
This is a custom context class that logs at lower severity levels
than the default thread-local context class.
For example, when a check for the current segment fails, a warning or
info message is logged rather than an error.
The parent Context class is the default threadlocal-backed storage;
the same technical constraints and features apply.
"""

def __init__(self, context_missing="LOG_WARNING"):
super().__init__(context_missing)

def put_segment(self, segment):
"""
Store the segment created by ``xray_recorder`` to the context.
It overrides the current segment if there is already one.
"""
super().put_segment(segment)

def end_segment(self, end_time=None):
"""
End the current active segment.
:param float end_time: epoch in seconds. If not specified the current
system time will be used.
"""
super().end_segment(end_time)

def put_subsegment(self, subsegment):
"""
Store the subsegment created by ``xray_recorder`` to the context.
If you put a new subsegment while there is already an open subsegment,
the new subsegment becomes the child of the existing subsegment.
"""
super().put_subsegment(subsegment)

def end_subsegment(self, end_time=None):
"""
End the current active subsegment. Return False if there is no
subsegment to end.
:param float end_time: epoch in seconds. If not specified the current
system time will be used.
"""
return super().end_subsegment(end_time)

def get_trace_entity(self):
"""
Return the current trace entity (segment/subsegment). If there is none,
it behaves based on pre-defined ``context_missing`` strategy.
If the SDK is disabled, returns a DummySegment
"""
return super().get_trace_entity()

def set_trace_entity(self, trace_entity):
"""
Store the input trace_entity to local context. It will overwrite all
existing ones if there is any.
"""
super().set_trace_entity(trace_entity)

def clear_trace_entities(self):
"""
Clear all trace entities stored in the local context.
When a threadlocal is used to store trace entities, this cleans
up all trace entities created by the current thread.
"""
super().clear_trace_entities()

def handle_context_missing(self):
"""
Called whenever there is no trace entity to access or mutate.
"""
if self.context_missing == "RUNTIME_ERROR":
raise SegmentNotFoundException(MISSING_SEGMENT_MSG)
elif self.context_missing == "LOG_ERROR":
log.error(MISSING_SEGMENT_MSG)
elif self.context_missing == "LOG_WARNING":
log.warning(MISSING_SEGMENT_MSG)

def _is_subsegment(self, entity):
return super()._is_subsegment(entity)

@property
def context_missing(self):
return self._context_missing

@context_missing.setter
def context_missing(self, value):
if value not in SUPPORTED_CONTEXT_MISSING:
log.warning("specified context_missing not supported, using default.")
return

self._context_missing = value
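A minimal wiring sketch for the custom context, assuming the application configures the global recorder itself; xray_recorder.configure(context=...) is the standard aws_xray_sdk hook, and the service name below is illustrative:

from aws_xray_sdk.core import xray_recorder

from app.aws.xray.context import NotifyContext

# Use the quieter context so a missing segment logs a warning instead of an error.
xray_recorder.configure(
    service="notification-api",  # illustrative service name
    context=NotifyContext(context_missing="LOG_WARNING"),
)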
79 changes: 79 additions & 0 deletions app/aws/xray_celery_handlers.py
@@ -0,0 +1,79 @@
import logging

from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.utils import stacktrace
from aws_xray_sdk.ext.util import construct_xray_header, inject_trace_header

__all__ = (
"xray_after_task_publish",
"xray_before_task_publish",
"xray_task_failure",
"xray_task_postrun",
"xray_task_prerun",
)

logger = logging.getLogger("celery_aws_xray_sdk_extension")

CELERY_NAMESPACE = "celery"


def xray_before_task_publish(
sender=None, headers=None, exchange=None, routing_key=None, properties=None, declare=None, retry_policy=None, **kwargs
):
logger.info(f"xray-celery: before publish: sender={sender}, headers={headers}, kwargs={kwargs}")
headers = headers if headers else {}
task_id = headers.get("id")
current_segment = xray_recorder.current_segment()
# Checks if there is a current segment to create a subsegment,
# otherwise we might be in a starter task. The prerun handler will
# create the segment for us down the road as it will be called after.
if current_segment:
subsegment = xray_recorder.begin_subsegment(name=sender, namespace="remote")
if subsegment:
subsegment.put_metadata("task_id", task_id, namespace=CELERY_NAMESPACE)
inject_trace_header(headers, subsegment)
else:
logger.error(
"xray-celery: Failed to create a X-Ray subsegment on task publish", extra={"celery": {"task_id": task_id}}
)
else:
logger.warn(f"xray-celery: No parent segment found for task {task_id} when trying to create subsegment")


def xray_after_task_publish(headers=None, body=None, exchange=None, routing_key=None, **kwargs):
logger.info(
f"xray-celery: after publish: headers={headers}, body={body}, exchange={exchange}, routing_key={routing_key}, kwargs={kwargs}"
)
if xray_recorder.current_subsegment():
xray_recorder.end_subsegment()
else:
logger.warn(f"xray-celery: Skipping subsegment closing after publish as no subsegment was found: {headers}")


def xray_task_prerun(task_id=None, task=None, args=None, **kwargs):
logger.info(f"xray-celery: prerun: task_id={task_id}, task={task}, kwargs={kwargs}")
xray_header = construct_xray_header(task.request)
segment = xray_recorder.begin_segment(name=task.name, traceid=xray_header.root, parent_id=xray_header.parent)
segment.save_origin_trace_header(xray_header)
segment.put_annotation("routing_key", task.request.properties["delivery_info"]["routing_key"])
segment.put_annotation("task_name", task.name)
segment.put_metadata("task_id", task_id, namespace=CELERY_NAMESPACE)


def xray_task_postrun(task_id=None, task=None, args=None, **kwargs):
logger.info(f"xray-celery: postrun: kwargs={kwargs}")
xray_recorder.end_segment()


def xray_task_failure(task_id=None, exception=None, **kwargs):
logger.info(f"xray-celery: failure: task_id={task_id}, e={exception}, kwargs={kwargs}")
segment = xray_recorder.current_segment()
if not segment:
logger.error(
"xray-celery: Failed to get the current segment on task failure", extra={"celery": {"task_id": kwargs.get("task_id")}}
)
return

if exception:
stack = stacktrace.get_stacktrace(limit=xray_recorder._max_trace_back)
segment.add_exception(exception, stack)
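A minimal wiring sketch for these handlers, assuming they are connected to Celery's standard task signals at worker start-up (the signal names are Celery's own; where the hook-up happens in the app is an assumption):

from celery.signals import (
    after_task_publish,
    before_task_publish,
    task_failure,
    task_postrun,
    task_prerun,
)

from app.aws.xray_celery_handlers import (
    xray_after_task_publish,
    xray_before_task_publish,
    xray_task_failure,
    xray_task_postrun,
    xray_task_prerun,
)

# Open/close a subsegment around publishing and a segment around task execution.
before_task_publish.connect(xray_before_task_publish)
after_task_publish.connect(xray_after_task_publish)
task_prerun.connect(xray_task_prerun)
task_postrun.connect(xray_task_postrun)
task_failure.connect(xray_task_failure)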
Empty file added app/cache/__init__.py
Empty file.
20 changes: 20 additions & 0 deletions app/cache/rest.py
@@ -0,0 +1,20 @@
from flask import Blueprint, current_app, jsonify
from notifications_utils.clients.redis.cache_keys import CACHE_KEYS_ALL

from app import redis_store
from app.errors import register_errors
from app.schemas import event_schema

cache_blueprint = Blueprint("cache", __name__, url_prefix="/cache-clear")
register_errors(cache_blueprint)


@cache_blueprint.route("", methods=["POST"])
def clear():
try:
# max() simply forces the generator, deleting keys for every pattern in CACHE_KEYS_ALL
max(redis_store.delete_cache_keys_by_pattern(pattern) for pattern in CACHE_KEYS_ALL)
return jsonify(result="ok"), 201
except Exception as e:
current_app.logger.error("Unable to clear the cache", exc_info=e)

return jsonify({"error": "Unable to clear the cache"}), 500