diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
index 632c8af7f5..fbb2cf1a3d 100644
--- a/.github/workflows/tests.yaml
+++ b/.github/workflows/tests.yaml
@@ -52,7 +52,6 @@ jobs:
           NOTIFY_ENVIRONMENT: test
           RETRY_SQS_URL: RETRY_SQS_URL
           SQLALCHEMY_DATABASE_URI: postgresql://postgres:postgres@localhost/test_notification_api
-          SQLALCHEMY_DATABASE_URI_READ: postgresql://postgres:postgres@localhost/test_notification_api
           TIMEOUT: 10
           VA_PROFILE_DOMAIN: int.vaprofile.va.gov
           VETEXT_API_AUTH_SSM_PATH: test
diff --git a/app/__init__.py b/app/__init__.py
index db9f430d0a..94f65d733e 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -97,12 +97,8 @@ def create_app(application, worker_id=None):
     if notify_environment == "test":
         assert worker_id is not None
         application.config["SQLALCHEMY_DATABASE_URI"] += f"_{worker_id}"
-        # set read-db to be the same as write/default instance for testing
-        application.config["SQLALCHEMY_BINDS"] = {"read-db": application.config["SQLALCHEMY_DATABASE_URI"]}
         assert "test_notification_api" in application.config["SQLALCHEMY_DATABASE_URI"], \
             "Don't run tests against the main database."
-        assert "test_notification_api" in application.config["SQLALCHEMY_DATABASE_URI_READ"], \
-            "Don't run tests against the main database."

     application.config["NOTIFY_APP_NAME"] = application.name

     init_app(application)
diff --git a/app/cloudfoundry_config.py b/app/cloudfoundry_config.py
index 0583a9eb1a..5cb7c437f4 100644
--- a/app/cloudfoundry_config.py
+++ b/app/cloudfoundry_config.py
@@ -15,7 +15,6 @@ def extract_cloudfoundry_config():
 def set_config_env_vars(vcap_services):
     # Postgres config
     os.environ['SQLALCHEMY_DATABASE_URI'] = vcap_services['postgres'][0]['credentials']['uri']
-    os.environ['SQLALCHEMY_DATABASE_URI_READ'] = vcap_services['postgres'][0]['credentials']['uri']

     vcap_application = json.loads(os.environ['VCAP_APPLICATION'])
     os.environ['NOTIFY_ENVIRONMENT'] = vcap_application['space_name']
diff --git a/app/config.py b/app/config.py
index b3eb1a513c..db787bc5d0 100644
--- a/app/config.py
+++ b/app/config.py
@@ -108,7 +108,6 @@ class Config(object):

     # DB conection string
     SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI')
-    SQLALCHEMY_DATABASE_URI_READ = os.getenv('SQLALCHEMY_DATABASE_URI_READ')

     # MMG API Key
     MMG_API_KEY = os.getenv('MMG_API_KEY')
@@ -515,10 +514,6 @@ class Development(Config):
         "SQLALCHEMY_DATABASE_URI",
         'postgresql://postgres@localhost/notification_api')

-    SQLALCHEMY_BINDS = {"read-db": os.getenv(
-        "SQLALCHEMY_DATABASE_URI_READ",
-        'postgresql://postgres@localhost/notification_api')}
-
     ANTIVIRUS_ENABLED = os.getenv('ANTIVIRUS_ENABLED') == '1'

     for queue in QueueNames.all_queues():
@@ -548,9 +543,6 @@ class Test(Development):
         'SQLALCHEMY_DATABASE_URI',
         'postgresql://postgres@localhost/test_notification_api'
     )
-    SQLALCHEMY_BINDS = {"read-db": os.getenv(
-        "SQLALCHEMY_DATABASE_URI_READ",
-        'postgresql://postgres@localhost/notification_api')}

     CELERY_SETTINGS = {
         'broker_url': 'you-forgot-to-mock-celery-in-your-tests://'
diff --git a/app/dao/api_key_dao.py b/app/dao/api_key_dao.py
index 52bb65f98f..d550c9dc0c 100644
--- a/app/dao/api_key_dao.py
+++ b/app/dao/api_key_dao.py
@@ -17,8 +17,7 @@ def save_model_api_key(api_key):
     if not api_key.id:
         api_key.id = uuid.uuid4()  # must be set now so version history model can use same id
-    if not api_key.secret:
-        api_key.secret = uuid.uuid4()
+    api_key.secret = uuid.uuid4()

     db.session.add(api_key)
diff --git a/app/dao/dao_utils.py b/app/dao/dao_utils.py
index ef7f10f714..e425f56014 100644
--- a/app/dao/dao_utils.py
+++ b/app/dao/dao_utils.py
@@ -2,8 +2,6 @@
 from app import db
 from app.history_meta import create_history
 from functools import wraps
-from contextlib import contextmanager
-from sqlalchemy.orm import scoped_session, sessionmaker


 def transactional(func):
@@ -81,26 +79,3 @@ def record_version(*args, **kwargs):

 def dao_rollback():
     db.session.rollback()
-
-
-@contextmanager
-def get_reader_session():
-    """
-    This context manager is used to abstract the connection to the read-only database engine
-    in order to execute read queries. By using a scoped session, it ensures that the session
-    is thread-local and can be reused if needed. It ensures proper handling of the session's
-    lifecycle by closing it when the context is exited.
-
-    Yields:
-        session (scoped_session): A session connected to the read-only database engine.
-
-    Example Usage:
-        with get_reader_session() as session:
-            result = session.query(Service).filter_by(id=service_id).one()
-    """
-    engine = db.engines['read-db']
-    session = scoped_session(sessionmaker(bind=engine))
-    try:
-        yield session
-    finally:
-        session.close()
diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py
index 1a9528573f..3f64e53389 100644
--- a/app/dao/services_dao.py
+++ b/app/dao/services_dao.py
@@ -1,18 +1,15 @@
 import uuid
 from datetime import date, datetime, timedelta

-from app.service.service_data import ServiceData, ServiceDataException
 from notifications_utils.statsd_decorators import statsd
 from notifications_utils.timezones import convert_utc_to_local_timezone
 from sqlalchemy.sql.expression import asc, case, and_, func
 from sqlalchemy.orm import joinedload
-from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
 from flask import current_app

 from app import db
 from app.dao.date_util import get_current_financial_year
 from app.dao.dao_utils import (
-    get_reader_session,
     transactional,
     version_class,
     VersionOptions,
@@ -197,29 +194,16 @@ def dao_fetch_service_by_inbound_number(number):


 def dao_fetch_service_by_id_with_api_keys(service_id, only_active=False):
-    with get_reader_session() as session:
-        query = session.query(Service).filter_by(
-            id=service_id
-        ).options(
-            joinedload('api_keys')
-        )
+    query = Service.query.filter_by(
+        id=service_id
+    ).options(
+        joinedload('api_keys')
+    )

-        if only_active:
-            query = query.filter(Service.active)
-
-        try:
-            result = query.one()
-            # instead of returning the whole model attached to read-db engine
-            # extract needed properties and return object that can be
-            # serialized for caching
-            return ServiceData(result)
-        except (ServiceDataException, NoResultFound, MultipleResultsFound) as err:
-            # we handle this failure in the parent
-            current_app.logger.error("Could not find unique service with ID %s", service_id)
-            raise NoResultFound(err)
-        except Exception:
-            # we handle this failure in the parent
-            raise
+    if only_active:
+        query = query.filter(Service.active)
+
+    return query.one()


 def dao_fetch_all_services_by_user(user_id, only_active=False):
diff --git a/app/models.py b/app/models.py
index c31d0d83f4..3289c2a1fe 100644
--- a/app/models.py
+++ b/app/models.py
@@ -34,7 +34,7 @@
 from sqlalchemy.ext.associationproxy import association_proxy
 from sqlalchemy.ext.declarative import declared_attr
 from sqlalchemy.ext.hybrid import hybrid_property
-from sqlalchemy.orm.collections import attribute_mapped_collection, InstrumentedList
+from sqlalchemy.orm.collections import attribute_mapped_collection


 SMS_TYPE = 'sms'
@@ -448,20 +448,8 @@ def get_default_letter_contact(self):
         default_letter_contact = [x for x in self.letter_contacts if x.is_default]
         return default_letter_contact[0].contact_block if default_letter_contact else None

-    def has_permissions(self, permissions_to_check_for):
-        if isinstance(permissions_to_check_for, InstrumentedList):
-            _permissions_to_check_for = [p.permission for p in permissions_to_check_for]
-        elif not isinstance(permissions_to_check_for, list):
-            _permissions_to_check_for = [permissions_to_check_for]
-        else:
-            _permissions_to_check_for = permissions_to_check_for
-
-        if isinstance(self.permissions, InstrumentedList):
-            _permissions = [p.permission for p in self.permissions]
-        else:
-            _permissions = self.permissions
-
-        return frozenset(_permissions_to_check_for).issubset(frozenset(_permissions))
+    def has_permission(self, permission):
+        return permission in [p.permission for p in self.permissions]

     def serialize_for_org_dashboard(self):
         return {
diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py
index 160b291aa4..703254824f 100644
--- a/app/notifications/process_notifications.py
+++ b/app/notifications/process_notifications.py
@@ -96,6 +96,7 @@ def persist_notification(
         template_version=template_version,
         to=recipient,
         service_id=service.id,
+        service=service,
         personalisation=personalisation,
         notification_type=notification_type,
         api_key_id=api_key_id,
diff --git a/app/notifications/receive_notifications.py b/app/notifications/receive_notifications.py
index 19a0dac70b..50013df8ae 100644
--- a/app/notifications/receive_notifications.py
+++ b/app/notifications/receive_notifications.py
@@ -223,7 +223,7 @@ def fetch_potential_service(inbound_number: str, provider_name: str) -> Service:
         current_app.logger.error(message)
         raise NoSuitableServiceForInboundSms(message)

-    elif not service.has_permissions([INBOUND_SMS_TYPE, SMS_TYPE]):
+    elif not has_inbound_sms_permissions(service.permissions):
         statsd_client.incr(f"inbound.{provider_name}.failed")
         message = f'Service "{service.id}" does not allow inbound SMS'
         current_app.logger.error(message)
@@ -233,6 +233,11 @@ def fetch_potential_service(inbound_number: str, provider_name: str) -> Service:
     return service


+def has_inbound_sms_permissions(permissions):
+    str_permissions = [p.permission for p in permissions]
+    return set([INBOUND_SMS_TYPE, SMS_TYPE]).issubset(set(str_permissions))
+
+
 def strip_leading_forty_four(number):
     if number.startswith('44'):
         return number.replace('44', '0', 1)
diff --git a/app/notifications/rest.py b/app/notifications/rest.py
index 6a0e2e10b9..d084f66a21 100644
--- a/app/notifications/rest.py
+++ b/app/notifications/rest.py
@@ -31,7 +31,8 @@
 from app.notifications.validators import (
     check_template_is_for_notification_type,
     check_template_is_active,
-    check_rate_limiting
+    check_rate_limiting,
+    service_has_permission
 )
 from app.schemas import (
     email_notification_schema,
@@ -121,7 +122,7 @@ def send_notification(notification_type):
         template_object = create_template_object_for_notification(template, notification_form.get('personalisation', {}))

         _service_allowed_to_send_to(notification_form, authenticated_service)
-        if not authenticated_service.has_permissions(notification_type):
+        if not service_has_permission(notification_type, authenticated_service.permissions):
             raise InvalidRequest(
                 {'service': ["Cannot send {}".format(get_public_notify_type_text(notification_type, plural=True))]},
                 status_code=400
@@ -186,9 +187,10 @@ def get_notification_return_data(notification_id, notification, template):


 def _service_can_send_internationally(service, number):
-    phone_info = get_international_phone_info(number)
+    international_phone_info = get_international_phone_info(number)

-    if phone_info.international and not service.has_permissions(INTERNATIONAL_SMS_TYPE):
+    if international_phone_info.international and \
+            INTERNATIONAL_SMS_TYPE not in [p.permission for p in service.permissions]:
         raise InvalidRequest(
             {'to': ["Cannot send to international mobile numbers"]},
             status_code=400
diff --git a/app/notifications/validators.py b/app/notifications/validators.py
index 52640d241d..18934fd049 100644
--- a/app/notifications/validators.py
+++ b/app/notifications/validators.py
@@ -23,6 +23,7 @@
 from app.v2.errors import TooManyRequestsError, BadRequestError, RateLimitError
 from app import redis_store
 from app.notifications.process_notifications import create_content_for_notification
+from app.utils import get_public_notify_type_text

 from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id
 from app.dao.service_letter_contact_dao import dao_get_letter_contact_by_id
@@ -107,12 +108,16 @@ def service_can_send_to_recipient(send_to, key_type, service, allow_whitelisted_
             raise BadRequestError(message=message)


-# TODO #1410 clean up and remove
 def service_has_permission(notify_type, permissions):
     return notify_type in [p.permission for p in permissions]


-# TODO #1410 clean up and remove
+def check_service_has_permission(notify_type, permissions):
+    if not service_has_permission(notify_type, permissions):
+        raise BadRequestError(message="Service is not allowed to send {}".format(
+            get_public_notify_type_text(notify_type, plural=True)))
+
+
 def check_service_can_schedule_notification(permissions, scheduled_for):
     if scheduled_for:
         if not service_has_permission(SCHEDULE_NOTIFICATIONS, permissions):
@@ -126,14 +131,15 @@ def validate_and_format_recipient(send_to, key_type, service, notification_type,
         service_can_send_to_recipient(send_to, key_type, service, allow_whitelisted_recipients)

     if notification_type == SMS_TYPE:
-        phone_info = get_international_phone_info(send_to)
+        international_phone_info = get_international_phone_info(send_to)

-        if phone_info.international and not service.has_permissions(INTERNATIONAL_SMS_TYPE):
+        if international_phone_info.international and \
+                INTERNATIONAL_SMS_TYPE not in [p.permission for p in service.permissions]:
             raise BadRequestError(message="Cannot send to international mobile numbers")

         return validate_and_format_phone_number(
             number=send_to,
-            international=phone_info.international
+            international=international_phone_info.international
         )
     elif notification_type == EMAIL_TYPE:
         return validate_and_format_email_address(email_address=send_to)
diff --git a/app/service/send_notification.py b/app/service/send_notification.py
index 0c390f2f8c..26b141067f 100644
--- a/app/service/send_notification.py
+++ b/app/service/send_notification.py
@@ -8,6 +8,7 @@
 from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id
 from app.dao.service_sms_sender_dao import dao_get_service_sms_sender_by_id
 from app.notifications.validators import (
+    check_service_has_permission,
     check_service_over_daily_message_limit,
     validate_and_format_recipient,
     validate_template
@@ -34,7 +35,6 @@
     move_uploaded_pdf_to_letters_bucket,
 )
 from app.v2.errors import BadRequestError
-from app.utils import get_public_notify_type_text


 def validate_created_by(service, created_by_id):
@@ -135,14 +135,8 @@ def get_reply_to_text(notification_type, sender_id, service, template):


 def send_pdf_letter_notification(service_id, post_data):
     service = dao_fetch_service_by_id(service_id)
-    if not service.has_permissions(LETTER_TYPE):
-        raise BadRequestError(message="Service is not allowed to send {}".format(
-            get_public_notify_type_text(LETTER_TYPE, plural=True)))
-
-    if not service.has_permissions(UPLOAD_LETTERS):
-        raise BadRequestError(message="Service is not allowed to send {}".format(
-            get_public_notify_type_text(UPLOAD_LETTERS, plural=True)))
-
+    check_service_has_permission(LETTER_TYPE, service.permissions)
+    check_service_has_permission(UPLOAD_LETTERS, service.permissions)
     check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service)
     validate_created_by(service, post_data['created_by'])
diff --git a/app/service/service_data.py b/app/service/service_data.py
deleted file mode 100644
index 10b8afaaeb..0000000000
--- a/app/service/service_data.py
+++ /dev/null
@@ -1,215 +0,0 @@
-import json
-from types import SimpleNamespace
-from typing import List, Union
-
-from app.models import Service
-
-
-class ServiceDataException(Exception):
-    """
-    Custom exception class for handling specific errors related to the ServiceData class.
-
-    Attributes:
-        message (str): A custom message describing the error.
-
-    Usage:
-        raise ServiceDataException("Description...")
-    """
-
-    def __init__(
-        self,
-        message="Unable to create ServiceData object.",
-        *args,
-        **kwargs
-    ):
-        super().__init__(message, *args, **kwargs)
-
-
-class ServiceDataApiKey:
-    """
-    A class to encapsulate information related to an authenticated service's API key.
-
-    This class serves as a data structure to hold the essential properties of an API key
-    associated with an authenticated service. It includes a simplified representation of
-    related service attributes (research mode, restricted, and active status).
-
-    This is needed to deal with circular dependency and multilevel nesting in previously
-    created SQLAlchemy model.
-
-    Creating simple namespance to preseve dot notation used in calling functions.
-
-    Attributes:
-        created_at (datetime): The timestamp when the API key was created.
-        created_by (User): The user who created the API key.
-        created_by_id (str): The ID of the user who created the API key.
-        expiry_date (datetime): The expiry date of the API key.
-        id (str): The unique identifier of the API key.
-        key_type (str): The type of the API key (e.g., "normal").
-        name (str): The name of the API key.
-        secret (str): The secret string associated with the API key.
-        service (SimpleNamespace): A namespace containing the 'research_mode',
-            'restricted', and 'active' attributes of the related service.
-        service_id (str): The ID of the related service.
-        updated_at (datetime): The timestamp when the API key was last updated.
-    """
-
-    def __init__(self, key) -> None:
-        """
-        Args:
-            key (object): The object containing the necessary attributes for the API key.
-        """
-        self.created_at = key.created_at
-        self.created_by = key.created_by
-        self.created_by_id = key.created_by_id
-        self.expiry_date = key.expiry_date
-        self.id = key.id
-        self.key_type = key.key_type
-        self.name = key.name
-        self.secret = key.secret
-
-        _service = {
-            "research_mode": key.service.research_mode,
-            "restricted": key.service.restricted,
-            "active": key.service.active,
-        }
-        self.service = SimpleNamespace(**_service)
-        self.service_id = key.service_id
-        self.updated_at = key.updated_at
-
-    def __eq__(self, other: 'ServiceDataApiKey') -> bool:
-        """
-        Determines equality between two instances of ServiceDataApiKey.
-
-        Two instances of ServiceDataApiKey are considered equal if they have the same values for
-        the following attributes:
-        - id
-        - name
-        - key_type
-        - created_by_id
-        - service_id
-        - secret
-
-        Args:
-            other (ServiceDataApiKey): The object to compare against.
-
-        Returns:
-            bool: True if the instances have the same values for the relevant attributes, False otherwise.
-        """
-        if isinstance(other, ServiceDataApiKey):
-            return self.id == other.id\
-                and self.key_type == other.key_type\
-                and self.name == other.name\
-                and self.service_id == other.service_id\
-                and self.created_by_id == other.created_by_id\
-                and self.secret == other.secret
-        return False
-
-
-class ServiceData:
-    """
-    Represents the relevant information extracted from an SQLAlchemy query result.
-
-    This class was created to allow for reading from multiple databases and to facilitate caching.
-    By extracting only the necessary information from the query result, it ensures compatibility
-    across different database connections and enables efficient serialization for caching purposes.
-    The extraction of specific information rather than working with entire query results provides a
-    more controlled and optimized way to handle the data.
-
-    Attributes:
-        active (bool): A flag indicating whether the service is active.
-        permissions (list): A list of permissions associated with the service.
-        api_keys (list): A list of API keys associated with the service.
-        id (int): The unique identifier of the service.
-        research_mode (bool): A flag indicating whether the service is in research mode.
-        restricted (bool): A flag indicating whether the service is restricted.
-        rate_limit (int): The rate limit applied to the service.
-        service_sms_senders (list): A list of SMS senders associated with the service.
-        message_limit (int): The message limit applied to the service.
-        users (list): A list of users associated with the service.
-        whitelist (list): A list of whitelisted entities for the service.
-
-    Methods:
-
-    """
-
-    def __init__(self, result=None):
-        self.active = None
-        self.permissions = None
-        self.api_keys = None
-        self.id = None
-        self.research_mode = None
-        self.restricted = None
-        self.rate_limit = None
-        self.service_sms_senders = None
-        self.message_limit = None
-        self.users = None
-        self.whitelist = None
-        if result is not None:
-            try:
-                self.extract(result)
-            except Exception as err:
-                raise ServiceDataException(err)
-
-    def extract(self, result: Service) -> None:
-        """
-        Extracts the necessary data from an authenticated service ORM object.
-
-        Args:
-            result: The ORM object containing the data. WARNING it's not pure Service
-                object, but Service augmented with api_keys
-
-        Returns:
-            None
-        """
-        self.active = result.active
-        self.permissions = [p.permission for p in result.permissions]
-        self.api_keys = [ServiceDataApiKey(key) for key in result.api_keys]
-        self.id = result.id
-        self.research_mode = result.research_mode
-        self.restricted = result.restricted
-        self.rate_limit = result.rate_limit
-        self.service_sms_senders = result.service_sms_senders
-        self.message_limit = result.message_limit
-        self.users = [u for u in result.users]
-        self.whitelist = [w for w in result.whitelist]
-
-    def serialize(self) -> str:
-        """
-        Serializes the object into a JSON string.
-        This method might be used for caching, allowing the object to be easily stored and retrieved.
-
-        Returns:
-            str: A JSON string.
-        """
-        return json.dumps(self.__dict__)
-
-    def has_permissions(self, permissions_to_check_for: Union[str, List[str]]) -> bool:
-        """
-        Checks if the object has the specified permissions.
-
-        Args:
-            permissions_to_check_for (list): A list or a single permission to check for.
-
-        Returns:
-            bool: True if all specified permissions are present, False otherwise.
-        """
-        if not isinstance(permissions_to_check_for, list):
-            tmp = permissions_to_check_for
-            permissions_to_check_for = [tmp]
-        return frozenset(permissions_to_check_for).issubset(frozenset(self.permissions))
-
-    @classmethod
-    def deserialize(cls, json_string: str) -> 'ServiceData':
-        """
-        Creates a new instance of the class using a JSON string.
-        This method might be used after retrieval from caching, allowing the object to be reconstructed.
-
-        Args:
-            json_string (str): A JSON string representing the object.
-
-        Returns:
-            ServiceData: A new instance of the class populated with the data from the JSON string.
-        """
-        result = cls()
-        result.__dict__ = json.loads(json_string)
-        return result
diff --git a/app/template/rest.py b/app/template/rest.py
index c89c0e220e..b12ee17261 100644
--- a/app/template/rest.py
+++ b/app/template/rest.py
@@ -98,7 +98,6 @@ def create_template(service_id):
         errors = {'content': [message]}
         raise InvalidRequest(errors, status_code=400)

-    # TODO #1410 clean up validator, use class method instead.
     if not service_has_permission(new_template.template_type, permissions):
         message = "Creating {} templates is not allowed".format(
             get_public_notify_type_text(new_template.template_type))
diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py
index b30c4ba53b..19424d9d51 100644
--- a/app/v2/notifications/post_notifications.py
+++ b/app/v2/notifications/post_notifications.py
@@ -17,7 +17,6 @@
 from app.feature_flags import accept_recipient_identifiers_enabled, is_feature_enabled, FeatureFlag
 from app.letters.utils import upload_letter_pdf
 from app.models import (
-    SCHEDULE_NOTIFICATIONS,
     SMS_TYPE,
     EMAIL_TYPE,
     LETTER_TYPE,
@@ -43,6 +42,8 @@
 from app.notifications.validators import (
     validate_and_format_recipient,
     check_rate_limiting,
+    check_service_can_schedule_notification,
+    check_service_has_permission,
     validate_template,
     check_service_email_reply_to_id,
     check_service_sms_sender_id
@@ -61,7 +62,6 @@
     post_letter_request,
     post_precompiled_letter_request
 )
-from app.utils import get_public_notify_type_text


 # FIXME: POST requests to /v2/notifications/letter match this route and the
@@ -74,9 +74,8 @@ def post_precompiled_letter_notification():
     form = validate(request.get_json(), post_precompiled_letter_request)

-    if not authenticated_service.has_permissions(LETTER_TYPE):
-        raise BadRequestError(message="Service is not allowed to send {}".format(
-            get_public_notify_type_text(LETTER_TYPE, plural=True)))
+    # Check permission to send letters
+    check_service_has_permission(LETTER_TYPE, authenticated_service.permissions)

     check_rate_limiting(authenticated_service, api_user)

@@ -132,15 +131,11 @@ def post_notification(notification_type):  # noqa: C901
     else:
         abort(404)

-    if not authenticated_service.has_permissions(notification_type):
-        raise BadRequestError(message="Service is not allowed to send {}".format(
-            get_public_notify_type_text(notification_type, plural=True)))
+    check_service_has_permission(notification_type, authenticated_service.permissions)

     scheduled_for = form.get("scheduled_for")

-    if scheduled_for is not None:
-        if not authenticated_service.has_permissions(SCHEDULE_NOTIFICATIONS):
-            raise BadRequestError(message="Cannot schedule notifications (this feature is invite-only)")
+    check_service_can_schedule_notification(authenticated_service.permissions, scheduled_for)

     check_rate_limiting(authenticated_service, api_user)
@@ -313,9 +308,7 @@ def process_document_uploads(personalisation_data, service, simulated=False):

     personalisation_data = personalisation_data.copy()

-    if not authenticated_service.has_permissions(UPLOAD_DOCUMENT):
-        raise BadRequestError(message="Service is not allowed to send {}".format(
-            get_public_notify_type_text(UPLOAD_DOCUMENT, plural=True)))
+    check_service_has_permission(UPLOAD_DOCUMENT, authenticated_service.permissions)

     if any(personalisation_data[key].get('sending_method') == 'link' for key in file_keys):
         raise NotImplementedError()
diff --git a/app/v2/notifications/rest_push.py b/app/v2/notifications/rest_push.py
index 349ef15347..69a6d9a99d 100644
--- a/app/v2/notifications/rest_push.py
+++ b/app/v2/notifications/rest_push.py
@@ -7,6 +7,9 @@
 from app.models import (
     PUSH_TYPE
 )
+from app.notifications.validators import (
+    check_service_has_permission
+)
 from app.schema_validation import validate
 from app.v2.errors import BadRequestError
 from app.v2.notifications import v2_notification_blueprint
@@ -14,7 +17,6 @@
     push_notification_request
 )
 from app.va.vetext import (VETextRetryableException, VETextNonRetryableException, VETextBadRequestException)
-from app.utils import get_public_notify_type_text


 @v2_notification_blueprint.route('/push', methods=['POST'])
@@ -22,10 +24,7 @@ def send_push_notification():
     if not is_feature_enabled(FeatureFlag.PUSH_NOTIFICATIONS_ENABLED):
         raise NotImplementedError()

-    if not authenticated_service.has_permissions(PUSH_TYPE):
-        raise BadRequestError(message="Service is not allowed to send {}".format(
-            get_public_notify_type_text(PUSH_TYPE, plural=True)))
-
+    check_service_has_permission(PUSH_TYPE, authenticated_service.permissions)
     req_json = validate(request.get_json(), push_notification_request)
     registry = MobileAppRegistry()
diff --git a/cd/application-deployment/dev/vaec-api-task-definition.json b/cd/application-deployment/dev/vaec-api-task-definition.json
index 45ba482f84..c48da9b809 100644
--- a/cd/application-deployment/dev/vaec-api-task-definition.json
+++ b/cd/application-deployment/dev/vaec-api-task-definition.json
@@ -256,10 +256,6 @@
           "name": "SQLALCHEMY_DATABASE_URI",
           "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/dev/notification-api/database/uri"
         },
-        {
-          "name": "SQLALCHEMY_DATABASE_URI_READ",
-          "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/dev/notification-api/database/uri_read"
-        },
         {
           "name": "TWILIO_ACCOUNT_SID",
           "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/dev/notification-api/twilio/account-sid"
diff --git a/cd/application-deployment/perf/vaec-api-task-definition.json b/cd/application-deployment/perf/vaec-api-task-definition.json
index 5ea6eb4a48..59e1aa84d7 100644
--- a/cd/application-deployment/perf/vaec-api-task-definition.json
+++ b/cd/application-deployment/perf/vaec-api-task-definition.json
@@ -204,10 +204,6 @@
           "name": "SQLALCHEMY_DATABASE_URI",
           "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/perf/notification-api/database/uri"
         },
-        {
-          "name": "SQLALCHEMY_DATABASE_URI_READ",
-          "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/perf/notification-api/database/uri_read"
-        },
         {
           "name": "ADMIN_CLIENT_SECRET",
           "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/perf/notification-api/admin-client-secret"
diff --git a/cd/application-deployment/prod/vaec-api-task-definition.json b/cd/application-deployment/prod/vaec-api-task-definition.json
index bca9bd844c..c411d10654 100644
--- a/cd/application-deployment/prod/vaec-api-task-definition.json
+++ b/cd/application-deployment/prod/vaec-api-task-definition.json
@@ -220,10 +220,6 @@
           "name": "SQLALCHEMY_DATABASE_URI",
           "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/prod/notification-api/database/uri"
         },
-        {
-          "name": "SQLALCHEMY_DATABASE_URI_READ",
-          "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/prod/notification-api/database/uri_read"
-        },
         {
           "name": "ADMIN_CLIENT_SECRET",
           "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/prod/notification-api/admin-client-secret"
diff --git a/cd/application-deployment/staging/vaec-api-task-definition.json b/cd/application-deployment/staging/vaec-api-task-definition.json
index 8e7d4cd2de..321ca8e828 100644
--- a/cd/application-deployment/staging/vaec-api-task-definition.json
+++ b/cd/application-deployment/staging/vaec-api-task-definition.json
@@ -236,10 +236,6 @@
           "name": "SQLALCHEMY_DATABASE_URI",
           "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/staging/notification-api/database/uri"
         },
-        {
-          "name": "SQLALCHEMY_DATABASE_URI_READ",
-          "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/staging/notification-api/database/uri_read"
-        },
         {
           "name": "ADMIN_CLIENT_SECRET",
           "valueFrom": "arn:aws-us-gov:ssm:us-gov-west-1:171875617347:parameter/staging/notification-api/admin-client-secret"
diff --git a/ci/.docker-env.example b/ci/.docker-env.example
index d37f7db240..291cce6d6e 100644
--- a/ci/.docker-env.example
+++ b/ci/.docker-env.example
@@ -2,7 +2,6 @@
 TWILIO_ACCOUNT_SID=add value
 TWILIO_AUTH_TOKEN=add value
 SQLALCHEMY_DATABASE_URI=postgresql://postgres:LocalPassword@db:5432/notification_api
-SQLALCHEMY_DATABASE_URI_READ=postgresql://postgres:LocalPassword@db:5432/notification_api
 NOTIFY_ENVIRONMENT=development
 FLASK_APP=application.py

@@ -46,4 +45,4 @@
 GITHUB_CLIENT_ID=fake-id
 GITHUB_CLIENT_SECRET=fake-secret
 REDIS_ENABLED='True'
-REDIS_URL=redis://redis:6379/0
+REDIS_URL=redis://localhost:6379/0
diff --git a/ci/README.md b/ci/README.md
deleted file mode 100644
index d27dc3af14..0000000000
--- a/ci/README.md
+++ /dev/null
@@ -1,119 +0,0 @@
-# Testing Write and Read instance
-
-## Fake Replication Setup
-NOTE: This setup is enough to test basic route functions with Read & Write instances. Scroll down if you need fully functional / real time replication between read and write instances.
-#### 1. Setup Docker-Compose
-Update local environment in your `.docker-env`. You should have Read & Write URIs pointing to different instances.
-```
-SQLALCHEMY_DATABASE_URI=postgresql://postgres:LocalPassword@db:5432/notification_api
-SQLALCHEMY_DATABASE_URI_READ=postgresql://postgres:LocalPassword@db-read:5432/notification_api
-```
-Start containers
-```
-docker-compose -f ci/docker-compose-local-readwrite.yml up -d
-```
-
-#### 2. Create snapshot of the write database instance
-```
-pg_dump -U postgres -W -F t notification_api > /var/db-data/writedb.dump
-```
-
-#### 3. Copy snapshot of the write database instance into read instance's mounted volume on your system
-```
-cp ci/db-data/writedb.dump ci/db-data-read/writedb.dump
-```
-
-#### 4. Restore data from write instance on read instance
-```
-pg_restore -U postgres -W -d notification_api /var/db-data-read/writedb.dump
-```
-
-#### 5. Stop containers & remove data when done testing
-```
-docker-compose -f ci/docker-compose-local-readwrite.yml down -v
-rm -fr db-data*
-```
-
-## Real Replication Setup
-Note: use if you need to test real time synchronization
-1. run docker compose file listed below
-2. take back up from an instance that has volume mounted
-```
-pg_basebackup -h write-db -D /var/lib/postgresql/data_sync -U replicator -P -R -X stream
-```
-1. stop only **read-db**
-2. replace contents of **data-read** directory with contents of **data-sync**
-3. start **read-db**
-###### setup-replica.sh
-```
-#!/bin/bash
-set -e
-
-cat >> ${PGDATA}/postgresql.conf <> ${PGDATA}/postgresql.conf <> ${PGDATA}/pg_hba.conf
-
-pg_ctl reload
-```
-##### docker-compose.yml
-```
-services:
-  write-db:
-    image: postgres:13 # adjust to the version you need
-    container_name: write-db
-    environment:
-      POSTGRES_USER:
-      POSTGRES_PASSWORD:
-      POSTGRES_DB: notification_api
-    volumes:
-      - ./pgdata-write:/var/lib/postgresql/data
-      - ./pgdata-sync:/var/lib/postgresql/data_sync
-      - ./setup-replication.sh:/docker-entrypoint-initdb.d/setup-replication.sh
-    ports:
-      - "5432:5432"
-    command: ["postgres", "-c", "log_statement=all"]
-
-  read-db:
-    image: postgres:13 # adjust to the version you need
-    container_name: read-db
-    depends_on:
-      - write-db
-    environment:
-      POSTGRES_USER:
-      POSTGRES_PASSWORD:
-      POSTGRES_DB: notification_api
-    volumes:
-      - ./pgdata-read:/var/lib/postgresql/data
-      - ./setup-replica.sh:/docker-entrypoint-initdb.d/setup-replica.sh
-    ports:
-      - "5433:5432"
-    links:
-      - write-db
-    command: ["postgres", "-c", "log_statement=all"]
-```
\ No newline at end of file
diff --git a/ci/docker-compose-local-readwrite.yml b/ci/docker-compose-local-readwrite.yml
deleted file mode 100644
index e53bee2896..0000000000
--- a/ci/docker-compose-local-readwrite.yml
+++ /dev/null
@@ -1,100 +0,0 @@
-version: "3"
-services:
-  app:
-    build:
-      context: ..
-      dockerfile: ci/Dockerfile.local
-    image: notification_api
-    volumes:
-      - "../:/app:ro"
-    ports:
-      - 6011:6011
-    env_file:
-      - .docker-env
-    depends_on:
-      - migrations
-      - localstack
-
-  db:
-    image: postgres:15
-    restart: unless-stopped
-    ports:
-      - 5432:5432
-    environment:
-      - POSTGRES_PASSWORD=LocalPassword
-      - POSTGRES_DB=notification_api
-    volumes:
-      - ./db-data:/var/db-data
-
-  db-read:
-    image: postgres:15
-    restart: unless-stopped
-    ports:
-      - 5433:5432
-    environment:
-      - POSTGRES_PASSWORD=LocalPassword
-      - POSTGRES_DB=notification_api
-    volumes:
-      - ./db-data-read:/var/db-data-read
-
-  migrations:
-    image: notification_api
-    env_file:
-      - .docker-env
-    volumes:
-      - "../:/app:ro"
-    command: ["bash", "-c", "flask db upgrade"]
-    depends_on:
-      - db
-
-  localstack:
-    image: localstack/localstack
-    container_name: localstack
-    ports:
-      - "4566:4566"
-    env_file:
-      - .docker-env
-    environment:
-      - SERVICES=ec2,iam,lambda,s3,ses,sqs,sts
-      - DEFAULT_REGION=us-east-2
-      - DATA_DIR=/tmp/localstack/data
-      - START_WEB=0
-      - HOSTNAME_EXTERNAL=localstack
-    volumes:
-      - './localstack_setup.sh:/docker-entrypoint-initaws.d/setup.sh'
-
-  celery:
-    image: notification_api
-    env_file:
-      - .docker-env
-    volumes:
-      - "../:/app:ro"
-    command: >
-      sh -c "/app/scripts/wait_for_it.sh localstack:4566 --timeout=30 -- /app/scripts/run_celery.sh"
-    depends_on:
-      - app
-      - localstack
-      - mountebank
-    healthcheck:
-      test: ["CMD", "sh", "-c", "/app/scripts/check_celery.sh"]
-      interval: 30s
-      timeout: 10s
-      retries: 5
-
-  mountebank:
-    container_name: mountebank
-    image: bbyars/mountebank:2.4.0
-    volumes:
-      - ../scripts/mountebank:/mountebank
-    ports:
-      - 2525:2525
-      - 7005:7005
-      - 7006:7006
-      - 7007:7007
-      - 7008:7008
-    command: start --configfile /mountebank/imposters.ejs --allowInjection
-
-  redis:
-    image: redis
-    ports:
-      - "6379:6379"
diff --git a/ci/docker-compose-local.yml b/ci/docker-compose-local.yml
index c74a5358d6..a92ec25468 100644
--- a/ci/docker-compose-local.yml
+++ b/ci/docker-compose-local.yml
@@ -14,7 +14,6 @@ services:
     depends_on:
       - migrations
       - localstack
-
   db:
     image: postgres:15
     restart: unless-stopped
@@ -23,7 +22,6 @@ services:
     environment:
       - POSTGRES_PASSWORD=LocalPassword
       - POSTGRES_DB=notification_api
-
   migrations:
     image: notification_api
     env_file:
@@ -33,7 +31,6 @@ services:
     command: ["bash", "-c", "flask db upgrade"]
     depends_on:
       - db
-
   localstack:
     image: localstack/localstack
     container_name: localstack
@@ -49,7 +46,6 @@ services:
       - HOSTNAME_EXTERNAL=localstack
     volumes:
       - './localstack_setup.sh:/docker-entrypoint-initaws.d/setup.sh'
-
   celery:
     image: notification_api
     env_file:
@@ -67,7 +63,6 @@ services:
       interval: 30s
       timeout: 10s
       retries: 5
-
   mountebank:
     container_name: mountebank
     image: bbyars/mountebank:2.4.0
@@ -80,8 +75,9 @@ services:
       - 7007:7007
       - 7008:7008
     command: start --configfile /mountebank/imposters.ejs --allowInjection
-
   redis:
     image: redis
     ports:
       - "6379:6379"
+
+
diff --git a/ci/docker-compose-test.yml b/ci/docker-compose-test.yml
index f005d50e40..c37b20ac54 100644
--- a/ci/docker-compose-test.yml
+++ b/ci/docker-compose-test.yml
@@ -22,7 +22,6 @@ services:
       - NOTIFY_ENVIRONMENT=test
       - RETRY_SQS_URL=RETRY_SQS_URL
       - SQLALCHEMY_DATABASE_URI=postgresql://postgres:LocalPassword@db:5432/test_notification_api
-      - SQLALCHEMY_DATABASE_URI_READ=postgresql://postgres:LocalPassword@db:5432/test_notification_api
      - TIMEOUT=10
      - VA_PROFILE_DOMAIN=int.vaprofile.va.gov
      - VETEXT_API_AUTH_SSM_PATH=test
diff --git a/ci/docker-compose.yml b/ci/docker-compose.yml
index 4e97ca7ab0..acb90b0b85 100644
--- a/ci/docker-compose.yml
+++ b/ci/docker-compose.yml
@@ -10,7 +10,6 @@ services:
       - TWILIO_ACCOUNT_SID=
       - TWILIO_AUTH_TOKEN=
       - SQLALCHEMY_DATABASE_URI=postgresql://postgres:LocalPassword@db:5432/notification_api
-      - SQLALCHEMY_DATABASE_URI_READ=postgresql://postgres:LocalPassword@db:5432/notification_api
       - NOTIFY_ENVIRONMENT=development
       - FLASK_APP=application.py
     depends_on:
@@ -31,7 +30,6 @@ services:
       - TWILIO_ACCOUNT_SID=
       - TWILIO_AUTH_TOKEN=
       - SQLALCHEMY_DATABASE_URI=postgresql://postgres:LocalPassword@db:5432/notification_api
-      - SQLALCHEMY_DATABASE_URI_READ=postgresql://postgres:LocalPassword@db:5432/notification_api
       - NOTIFY_ENVIRONMENT=development
       - FLASK_APP=application.py
     depends_on:
diff --git a/tests/README.md b/tests/README.md
index 4b546825f9..c086afacab 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -9,9 +9,9 @@ The docker-compose command used to run the full test suite sets environment vari
 ## Setup

 1. Stop all running containers associated with Notification-api.
-2. Start the Postgres (ci_db_1) container, and any other containers required by the functionality under test: `docker start ci-db-1`.  All migrations should already be applied.
-3. Start a test container shell by running `docker run --rm -it -v ":/app" --env-file tests/env_vars --name ci-test ci-test bash`.
-4. Add the test container started in the previous step to the default network: `docker network connect ci_default ci-test`.
+2. Start the Postgres (ci_db_1) container, and any other containers required by the functionality under test: `docker start ci_db_1`.  All migrations should already be applied.
+3. Start a test container shell by running `docker run --rm -it -v ":/app" --env-file tests/env_vars ci_test bash`.
+4. Add the test container started in the previous step to the default network: `docker network connect ci_default `.
 5. In the test container shell, run `pytest -h` to see the syntax for running tests.  Without flags, you can run `pytest [file or directory]...`.
 ## Running Individual Tests
diff --git a/tests/app/authentication/test_authentication.py b/tests/app/authentication/test_authentication.py
index e4b7774b16..9e9ce42055 100644
--- a/tests/app/authentication/test_authentication.py
+++ b/tests/app/authentication/test_authentication.py
@@ -6,7 +6,7 @@
 from flask_jwt_extended import create_access_token
 from jwt import ExpiredSignatureError

-from app.dao.services_dao import dao_add_user_to_service, dao_update_service
+from app.dao.services_dao import dao_add_user_to_service
 from tests.app.db import create_user, create_service
 from tests.conftest import set_config_values

@@ -16,8 +16,7 @@
 from notifications_python_client.authentication import create_jwt_token
 from app import api_user
-from app.dao.api_key_dao import get_unsigned_secrets,\
-    save_model_api_key, get_unsigned_secret, expire_api_key
+from app.dao.api_key_dao import get_unsigned_secrets, save_model_api_key, get_unsigned_secret, expire_api_key
 from app.models import ApiKey, KEY_TYPE_NORMAL, PERMISSION_LIST, Permission
 from app.authentication.auth import AuthError, validate_admin_auth, validate_service_api_key_auth, \
     requires_admin_auth_or_user_in_service, requires_user_in_service_or_admin
@@ -279,10 +278,8 @@ def test_authentication_returns_error_when_service_doesnt_exit(

 def test_authentication_returns_error_when_service_inactive(client, sample_api_key):
     sample_api_key.service.active = False
-    # we now need to save the model because we'll read using read-db engine
-    dao_update_service(sample_api_key.service)
+    token = create_jwt_token(secret=str(sample_api_key.id), client_id=str(sample_api_key.service_id))

-    token = create_jwt_token(secret=str(sample_api_key.secret), client_id=str(sample_api_key.service_id))
     response = client.get('/notifications', headers={'Authorization': 'Bearer {}'.format(token)})

     assert response.status_code == 403
@@ -304,23 +301,21 @@ def test_authentication_returns_error_when_service_has_no_secrets(client,
     assert exc.value.service_id == sample_service.id


-def test_should_attach_the_current_api_key_to_current_app(notify_api, sample_service_data_api_key):
+def test_should_attach_the_current_api_key_to_current_app(notify_api, sample_service, sample_api_key):
     with notify_api.test_request_context(), notify_api.test_client() as client:
-        _key = sample_service_data_api_key
-        token = create_jwt_token(secret=_key.secret, client_id=str(_key.service_id))
+        token = __create_token(sample_api_key.service_id)
         response = client.get(
             '/notifications',
             headers={'Authorization': 'Bearer {}'.format(token)}
         )
         assert response.status_code == 200
-        # api_user is an api key assigned globally after a service is authenticated
-        assert api_user == _key
+        assert api_user == sample_api_key


 def test_should_return_403_when_token_is_expired(client, sample_api_key):
     with freeze_time('2001-01-01T12:00:00'):
-        token = create_jwt_token(secret=sample_api_key.secret, client_id=str(sample_api_key.service_id))
+        token = __create_token(sample_api_key.service_id)
     with freeze_time('2001-01-01T12:00:40'):
         with pytest.raises(AuthError) as exc:
             request.headers = {'Authorization': 'Bearer {}'.format(token)}
diff --git a/tests/app/conftest.py b/tests/app/conftest.py
index e154589d5d..d7ccfc0bcd 100644
--- a/tests/app/conftest.py
+++ b/tests/app/conftest.py
@@ -1,6 +1,5 @@
 import json
 import os
-from app.service.service_data import ServiceDataApiKey
 import pytest
 import pytz
 import requests_mock
@@ -582,23 +581,6 @@ def sample_api_key(notify_db,
     return api_key


-@pytest.fixture(scope='function')
-def sample_service_data_api_key(service=None, key_type=KEY_TYPE_NORMAL, name=None):
-    if service is None:
-        service = create_service(check_if_service_exists=True)
-
-    data = {
-        'service': service,
-        'name': name or uuid.uuid4(),
-        'created_by': service.created_by,
-        'key_type': key_type
-    }
-    api_key = ApiKey(**data)
-    save_model_api_key(api_key)
-
-    return ServiceDataApiKey(api_key)
-
-
 @pytest.fixture(scope='function')
 def sample_test_api_key(sample_api_key):
     service = create_service(check_if_service_exists=True)
diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py
index 301ec74b72..6410b5cdaf 100644
--- a/tests/app/notifications/test_receive_notification.py
+++ b/tests/app/notifications/test_receive_notification.py
@@ -12,6 +12,7 @@
     format_mmg_datetime,
     create_inbound_sms_object,
     strip_leading_forty_four,
+    has_inbound_sms_permissions,
     unescape_string,
     fetch_potential_service,
     NoSuitableServiceForInboundSms,
 )
@@ -99,7 +100,7 @@ def test_receive_notification_returns_received_to_mmg(client, mocker, sample_ser
 ])
 def test_check_permissions_for_inbound_sms(notify_db, notify_db_session, permissions, expected_response):
     service = create_service(service_permissions=permissions)
-    assert service.has_permissions([INBOUND_SMS_TYPE, SMS_TYPE]) is expected_response
+    assert has_inbound_sms_permissions(service.permissions) is expected_response


 @pytest.mark.skip(reason="Endpoint disabled and slated for removal")
@@ -721,13 +722,9 @@ def test_should_raise_if_no_matching_service(self, notify_api, mocker):
             fetch_potential_service('some-inbound-number', 'some-provider-name')

     def test_should_raise_if_service_doesnt_have_permission(self, notify_api, mocker):
-        # make mocked service execute original code
-        # just mocking service won't let us execute .has_permissions
-        # method properly
-        mock_service_instance = Service(permissions=[])
         mocker.patch(
             'app.notifications.receive_notifications.dao_fetch_service_by_inbound_number',
-            return_value=mock_service_instance
+            return_value=mocker.Mock(Service, permissions=[])
         )

         with pytest.raises(NoSuitableServiceForInboundSms):
diff --git a/tests/app/test_cloudfoundry_config.py b/tests/app/test_cloudfoundry_config.py
index 2304673a44..ccf238d49c 100644
--- a/tests/app/test_cloudfoundry_config.py
+++ b/tests/app/test_cloudfoundry_config.py
@@ -36,7 +36,6 @@ def test_extract_cloudfoundry_config_populates_other_vars():
     extract_cloudfoundry_config()

     assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgres uri'
-    assert os.environ['SQLALCHEMY_DATABASE_URI_READ'] == 'postgres uri'
     assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
     assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
diff --git a/tests/env_vars b/tests/env_vars
index dce1cf72f7..337b4d13e7 100644
--- a/tests/env_vars
+++ b/tests/env_vars
@@ -1,7 +1,6 @@
 # https://docs.docker.com/compose/env-file/
 SQLALCHEMY_DATABASE_URI=postgresql://postgres:LocalPassword@db:5432/test_notification_api
-SQLALCHEMY_DATABASE_URI_READ=postgresql://postgres:LocalPassword@db:5432/test_notification_api
 NOTIFY_ENVIRONMENT=test
 AWS_ACCESS_KEY_ID=test
 AWS_PINPOINT_APP_ID=AWS_PINPOINT_APP_ID