diff --git a/.env-dist b/.env-dist index daca58ec3ff..3d38e57838e 100644 --- a/.env-dist +++ b/.env-dist @@ -65,9 +65,6 @@ BYTES_DB_URI=postgresql://${BYTES_DB_USER}:${BYTES_DB_PASSWORD}@postgres:5432/${ # --- Octopoes --- # # See `octopoes/octopoes/config/settings.py` -# Number of Celery workers (for the Octopoes API worker) that need to be started -CELERY_WORKER_CONCURRENCY=${CELERY_WORKER_CONCURRENCY:-4} - # --- Mula --- # # See `mula/scheduler/config/settings.py` diff --git a/boefjes/boefjes/plugins/kat_fierce/boefje.json b/boefjes/boefjes/plugins/kat_fierce/boefje.json index 9e1ab8182ef..9f15dbcb544 100644 --- a/boefjes/boefjes/plugins/kat_fierce/boefje.json +++ b/boefjes/boefjes/plugins/kat_fierce/boefje.json @@ -1,9 +1,9 @@ { "id": "fierce", "name": "Fierce", - "description": "Perform DNS reconnaissance using Fierce. Helps to locate non-contiguous IP space and hostnames against specified hostnames. No exploitation is performed.", + "description": "Perform DNS reconnaissance using Fierce. Helps to locate non-contiguous IP space and hostnames against specified hostnames. No exploitation is performed. Beware if your DNS is managed by an external party. This boefje performs a brute force attack against the name server.", "consumes": [ "Hostname" ], - "scan_level": 1 + "scan_level": 3 } diff --git a/boefjes/boefjes/plugins/pdio_subfinder/boefje.json b/boefjes/boefjes/plugins/pdio_subfinder/boefje.json index fd69ae598c1..abb75748aa0 100644 --- a/boefjes/boefjes/plugins/pdio_subfinder/boefje.json +++ b/boefjes/boefjes/plugins/pdio_subfinder/boefje.json @@ -1,7 +1,7 @@ { "id": "pdio-subfinder", "name": "Subfinder", - "description": "A subdomain discovery tool. (projectdiscovery.io)", + "description": "A subdomain discovery tool. (projectdiscovery.io). Returns valid subdomains for websites using passive online sources. 
Beware that many of the online sources require their own API key to get more accurate data.", "consumes": [ "Hostname" ], @@ -9,5 +9,5 @@ "SUBFINDER_RATE_LIMIT", "SUBFINDER_VERSION" ], - "scan_level": 2 + "scan_level": 1 } diff --git a/mula/scheduler/connectors/services/katalogus.py b/mula/scheduler/connectors/services/katalogus.py index 8a44543a326..99c4b3a011f 100644 --- a/mula/scheduler/connectors/services/katalogus.py +++ b/mula/scheduler/connectors/services/katalogus.py @@ -44,90 +44,95 @@ def __init__(self, host: str, source: str, timeout: int, pool_connections: int, def flush_caches(self) -> None: self.flush_plugin_cache() - self.flush_normalizer_cache() - self.flush_boefje_cache() + self.flush_boefje_cache(self.plugin_cache) + self.flush_normalizer_cache(self.plugin_cache) - def flush_plugin_cache(self) -> None: + def flush_plugin_cache(self): self.logger.debug("Flushing the katalogus plugin cache for organisations") + plugin_cache: dict = {} + orgs = self.get_organisations() + for org in orgs: + plugin_cache.setdefault(org.id, {}) + + plugins = self.get_plugins_by_organisation(org.id) + plugin_cache[org.id] = {plugin.id: plugin for plugin in plugins if plugin.enabled} + with self.plugin_cache_lock: # First, we reset the cache, to make sure we won't get any ExpiredError self.plugin_cache.expiration_enabled = False self.plugin_cache.reset() - - orgs = self.get_organisations() - for org in orgs: - self.plugin_cache.setdefault(org.id, {}) - - plugins = self.get_plugins_by_organisation(org.id) - self.plugin_cache[org.id] = {plugin.id: plugin for plugin in plugins if plugin.enabled} - + self.plugin_cache.cache = plugin_cache self.plugin_cache.expiration_enabled = True self.logger.debug("Flushed the katalogus plugin cache for organisations") - def flush_boefje_cache(self) -> None: + def flush_boefje_cache(self, plugins=None) -> None: """boefje.consumes -> plugin type boefje""" self.logger.debug("Flushing the katalogus boefje type cache for organisations") - with 
self.boefje_cache_lock: - # First, we reset the cache, to make sure we won't get any ExpiredError - self.boefje_cache.expiration_enabled = False - self.boefje_cache.reset() - - orgs = self.get_organisations() - for org in orgs: - self.boefje_cache[org.id] = {} + boefje_cache: dict = {} + orgs = self.get_organisations() + for org in orgs: + boefje_cache.setdefault(org.id, {}) - for plugin in self.get_plugins_by_organisation(org.id): - if plugin.type != "boefje": - continue + org_plugins = plugins[org.id].values() if plugins else self.get_plugins_by_organisation(org.id) + for plugin in org_plugins: + if plugin.type != "boefje": + continue - if plugin.enabled is False: - continue + if plugin.enabled is False: + continue - if not plugin.consumes: - continue + if not plugin.consumes: + continue - # NOTE: backwards compatibility, when it is a boefje the - # consumes field is a string field. - if isinstance(plugin.consumes, str): - self.boefje_cache[org.id].setdefault(plugin.consumes, []).append(plugin) - continue + # NOTE: backwards compatibility, when it is a boefje the + # consumes field is a string field. 
+ if isinstance(plugin.consumes, str): + boefje_cache[org.id].setdefault(plugin.consumes, []).append(plugin) + continue - for type_ in plugin.consumes: - self.boefje_cache[org.id].setdefault(type_, []).append(plugin) + for type_ in plugin.consumes: + boefje_cache[org.id].setdefault(type_, []).append(plugin) + with self.boefje_cache_lock: + # First, we reset the cache, to make sure we won't get any ExpiredError + self.boefje_cache.expiration_enabled = False + self.boefje_cache.reset() + self.boefje_cache.cache = boefje_cache self.boefje_cache.expiration_enabled = True self.logger.debug("Flushed the katalogus boefje type cache for organisations") - def flush_normalizer_cache(self) -> None: + def flush_normalizer_cache(self, plugins=None) -> None: """normalizer.consumes -> plugin type normalizer""" self.logger.debug("Flushing the katalogus normalizer type cache for organisations") - with self.normalizer_cache_lock: - # First, we reset the cache, to make sure we won't get any ExpiredError - self.normalizer_cache.expiration_enabled = False - self.normalizer_cache.reset() + normalizer_cache: dict = {} + orgs = self.get_organisations() + for org in orgs: + normalizer_cache.setdefault(org.id, {}) - orgs = self.get_organisations() - for org in orgs: - self.normalizer_cache[org.id] = {} + org_plugins = plugins[org.id].values() if plugins else self.get_plugins_by_organisation(org.id) + for plugin in org_plugins: + if plugin.type != "normalizer": + continue - for plugin in self.get_plugins_by_organisation(org.id): - if plugin.type != "normalizer": - continue + if plugin.enabled is False: + continue - if plugin.enabled is False: - continue + if not plugin.consumes: + continue - if not plugin.consumes: - continue - - for type_ in plugin.consumes: - self.normalizer_cache[org.id].setdefault(type_, []).append(plugin) + for type_ in plugin.consumes: + normalizer_cache[org.id].setdefault(type_, []).append(plugin) + with self.normalizer_cache_lock: + # First, we reset the cache, to 
make sure we won't get any ExpiredError + self.normalizer_cache.expiration_enabled = False + self.normalizer_cache.reset() + self.normalizer_cache.cache = normalizer_cache self.normalizer_cache.expiration_enabled = True self.logger.debug("Flushed the katalogus normalizer type cache for organisations") diff --git a/mula/scheduler/rankers/boefje.py b/mula/scheduler/rankers/boefje.py index 942ef55a93a..f951aea4f84 100644 --- a/mula/scheduler/rankers/boefje.py +++ b/mula/scheduler/rankers/boefje.py @@ -31,11 +31,11 @@ def rank(self, obj: Any) -> int: grace_period = timedelta(seconds=self.ctx.config.pq_grace_period) # New tasks that have not yet run before - if obj.prior_tasks is None or not obj.prior_tasks: + if obj.latest_task is None or not obj.latest_task: return 2 # Make sure that we don't have tasks that are still in the grace period - time_since_grace_period = ((datetime.now(timezone.utc) - obj.prior_tasks[0].modified_at) - grace_period).seconds + time_since_grace_period = ((datetime.now(timezone.utc) - obj.latest_task.modified_at) - grace_period).seconds if time_since_grace_period < 0: return -1 diff --git a/mula/scheduler/schedulers/boefje.py b/mula/scheduler/schedulers/boefje.py index 0b42b3e5549..a6768b884b5 100644 --- a/mula/scheduler/schedulers/boefje.py +++ b/mula/scheduler/schedulers/boefje.py @@ -562,8 +562,8 @@ def push_boefje_task(self, boefje_task: BoefjeTask, caller: str = "") -> None: ) return - prior_tasks = self.ctx.datastores.task_store.get_tasks_by_hash(boefje_task.hash) - score = self.priority_ranker.rank(SimpleNamespace(prior_tasks=prior_tasks, task=boefje_task)) + latest_task = self.ctx.datastores.task_store.get_latest_task_by_hash(boefje_task.hash) + score = self.priority_ranker.rank(SimpleNamespace(latest_task=latest_task, task=boefje_task)) task = Task( id=boefje_task.id, diff --git a/mula/tests/integration/test_boefje_scheduler.py b/mula/tests/integration/test_boefje_scheduler.py index a925bde5619..9b9986f8415 100644 --- 
a/mula/tests/integration/test_boefje_scheduler.py +++ b/mula/tests/integration/test_boefje_scheduler.py @@ -577,10 +577,10 @@ def test_push_task_no_ooi(self): @mock.patch("scheduler.schedulers.BoefjeScheduler.has_boefje_permission_to_run") @mock.patch("scheduler.schedulers.BoefjeScheduler.has_boefje_task_grace_period_passed") @mock.patch("scheduler.schedulers.BoefjeScheduler.is_item_on_queue_by_hash") - @mock.patch("scheduler.context.AppContext.datastores.task_store.get_tasks_by_hash") + @mock.patch("scheduler.context.AppContext.datastores.task_store.get_latest_task_by_hash") def test_push_task_queue_full( self, - mock_get_tasks_by_hash, + mock_get_latest_task_by_hash, mock_is_item_on_queue_by_hash, mock_has_boefje_task_grace_period_passed, mock_has_boefje_permission_to_run, @@ -606,7 +606,7 @@ def test_push_task_queue_full( mock_has_boefje_task_started_running.return_value = False mock_has_boefje_task_grace_period_passed.return_value = True mock_is_item_on_queue_by_hash.return_value = False - mock_get_tasks_by_hash.return_value = None + mock_get_latest_task_by_hash.return_value = None self.mock_get_plugin.return_value = PluginFactory(scan_level=0, consumes=[ooi.object_type]) # Act diff --git a/octopoes/octopoes/config/celery.py b/octopoes/octopoes/config/celery.py index b9894771081..b47a011f6d2 100644 --- a/octopoes/octopoes/config/celery.py +++ b/octopoes/octopoes/config/celery.py @@ -14,3 +14,5 @@ result_accept_content = ["application/json", "application/x-python-serialize"] task_queues = (Queue(QUEUE_NAME_OCTOPOES),) + +worker_concurrency = settings.workers diff --git a/octopoes/octopoes/config/settings.py b/octopoes/octopoes/config/settings.py index 16fce6969b6..5f6e9df343d 100644 --- a/octopoes/octopoes/config/settings.py +++ b/octopoes/octopoes/config/settings.py @@ -70,6 +70,8 @@ class Settings(BaseSettings): outgoing_request_timeout: int = Field(30, description="Timeout for outgoing HTTP requests") + workers: int = Field(4, description="Number of Octopoes 
Celery workers") + model_config = SettingsConfigDict(env_prefix="OCTOPOES_") @classmethod diff --git a/rocky/reports/forms.py b/rocky/reports/forms.py index 265d0d15e27..e9a6e58815e 100644 --- a/rocky/reports/forms.py +++ b/rocky/reports/forms.py @@ -38,6 +38,16 @@ class ReportScheduleStartDateChoiceForm(BaseRockyForm): ) +class ReportRecurrenceChoiceForm(BaseRockyForm): + choose_recurrence = forms.ChoiceField( + label="", + required=False, + widget=forms.RadioSelect(attrs={"class": "submit-on-click"}), + choices=(("once", _("No, just once")), ("repeat", _("Yes, repeat"))), + initial="once", + ) + + class ReportScheduleStartDateForm(BaseRockyForm): start_date = forms.DateField( label=_("Start date"), @@ -47,18 +57,14 @@ class ReportScheduleStartDateForm(BaseRockyForm): input_formats=["%Y-%m-%d"], ) - -class ReportRecurrenceChoiceForm(BaseRockyForm): - choose_recurrence = forms.ChoiceField( - label="", - required=False, - widget=forms.RadioSelect(attrs={"class": "submit-on-click"}), - choices=(("once", _("No, just once")), ("repeat", _("Yes, repeat"))), - initial="once", + start_time = forms.TimeField( + label=_("Start time (UTC)"), + widget=forms.TimeInput(format="%H:%M", attrs={"form": "generate_report"}), + initial=lambda: datetime.now(tz=timezone.utc).time(), + required=True, + input_formats=["%H:%M"], ) - -class ReportScheduleRecurrenceForm(BaseRockyForm): recurrence = forms.ChoiceField( label=_("Recurrence"), required=True, @@ -66,6 +72,17 @@ class ReportScheduleRecurrenceForm(BaseRockyForm): choices=[("daily", _("Daily")), ("weekly", _("Weekly")), ("monthly", _("Monthly")), ("yearly", _("Yearly"))], ) + def clean(self): + cleaned_data = super().clean() + start_date = cleaned_data.get("start_date") + start_time = cleaned_data.get("start_time") + + if start_date and start_time: + start_datetime = datetime.combine(start_date, start_time) + cleaned_data["start_datetime"] = start_datetime + + return cleaned_data + class CustomReportScheduleForm(BaseRockyForm): 
start_date = forms.DateField( diff --git a/rocky/reports/report_types/definitions.py b/rocky/reports/report_types/definitions.py index 112d6807c80..ec7c4be9ed6 100644 --- a/rocky/reports/report_types/definitions.py +++ b/rocky/reports/report_types/definitions.py @@ -17,6 +17,11 @@ class ReportPlugins(TypedDict): optional: set[str] +class SubReportPlugins(TypedDict): + required: list[str] + optional: list[str] + + def report_plugins_union(report_types: list[type["BaseReport"]]) -> ReportPlugins: """Take the union of the required and optional plugin sets and remove optional plugins that are required""" diff --git a/rocky/reports/report_types/findings_report/report.html b/rocky/reports/report_types/findings_report/report.html index 35cfa3782f7..6eecba51e50 100644 --- a/rocky/reports/report_types/findings_report/report.html +++ b/rocky/reports/report_types/findings_report/report.html @@ -81,5 +81,5 @@
{% translate "No findings have been found." %}
+{% translate "No findings have been identified yet." %}
{% endif %} diff --git a/rocky/reports/templates/partials/export_report_settings.html b/rocky/reports/templates/partials/export_report_settings.html index 09e5384b0ae..0937a6a5ae5 100644 --- a/rocky/reports/templates/partials/export_report_settings.html +++ b/rocky/reports/templates/partials/export_report_settings.html @@ -21,6 +21,12 @@+ {% blocktranslate trimmed %} + Please choose a start date, time and recurrence for scheduling your report(s). + If you select a date on the 28th-31st of the month, it will always be scheduled on the last day of the month. + {% endblocktranslate %} +
{% blocktranslate trimmed %} The date you select will be the reference date for the data set for your report. @@ -28,14 +34,8 @@
{% translate "Currently there are no findings for OOI" %} "{{ ooi.human_readable }}".
+{% translate "Currently no findings have been identified for OOI" %} "{{ ooi.human_readable }}".
{% translate "Add finding" %} {% endif %} diff --git a/rocky/rocky/templates/organizations/organization_crisis_room.html b/rocky/rocky/templates/organizations/organization_crisis_room.html index cb7cb0d66ee..bdf47c846c5 100644 --- a/rocky/rocky/templates/organizations/organization_crisis_room.html +++ b/rocky/rocky/templates/organizations/organization_crisis_room.html @@ -26,58 +26,62 @@- {% translate "Showing " %}{{ object_list|length }} {% translate "of" %} {{ paginator.count }} {% translate "findings" %} -
-{% translate "Severity" %} | -{% translate "Finding" %} | -{% translate "Details" %} | -|||
---|---|---|---|---|---|
- {{ ft.risk_severity|capfirst }} - | -- {{ finding.human_readable }} - | -- - | -|||
- {% translate "Finding type:" %}- {{ ft.human_readable }} -{% translate "OOI type:" %}- {{ ooi.object_type }} -{% translate "Source OOI:" %}- {{ ooi.human_readable }} - |
-
+ {% translate "Showing " %}{{ object_list|length }} {% translate "of" %} {{ paginator.count }} {% translate "findings" %} +
+{% translate "Severity" %} | +{% translate "Finding" %} | +{% translate "Details" %} | +|||
---|---|---|---|---|---|
+ {{ ft.risk_severity|capfirst }} + | ++ {{ finding.human_readable }} + | ++ + | +|||
+ {% translate "Finding type:" %}+ {{ ft.human_readable }} +{% translate "OOI type:" %}+ {{ ooi.object_type }} +{% translate "Source OOI:" %}+ {{ ooi.human_readable }} + |
+
{% translate "No findings have been identified yet." %}
+ {% endif %} diff --git a/rocky/rocky/views/scheduler.py b/rocky/rocky/views/scheduler.py index 7cdc67f8988..654d09acea9 100644 --- a/rocky/rocky/views/scheduler.py +++ b/rocky/rocky/views/scheduler.py @@ -1,5 +1,5 @@ import uuid -from datetime import datetime, timezone +from datetime import datetime from typing import Any from django.contrib import messages @@ -10,7 +10,6 @@ ChildReportNameForm, ParentReportNameForm, ReportRecurrenceChoiceForm, - ReportScheduleRecurrenceForm, ReportScheduleStartDateChoiceForm, ReportScheduleStartDateForm, ) @@ -48,10 +47,9 @@ class SchedulerView(OctopoesView): task_filter_form = TaskFilterForm report_schedule_form_start_date_choice = ReportScheduleStartDateChoiceForm # today or different date - report_schedule_form_start_date = ReportScheduleStartDateForm # date widget + report_schedule_form_start_date_time_recurrence = ReportScheduleStartDateForm # date, time and recurrence report_schedule_form_recurrence_choice = ReportRecurrenceChoiceForm # once or repeat - report_schedule_form_recurrence = ReportScheduleRecurrenceForm # select interval (daily, weekly, etc..) 
report_parent_name_form = ParentReportNameForm # parent name format report_child_name_form = ChildReportNameForm # child name format @@ -96,15 +94,12 @@ def get_task_list(self) -> LazyTaskList | list[Any]: def get_report_schedule_form_start_date_choice(self): return self.report_schedule_form_start_date_choice(self.request.POST) - def get_report_schedule_form_start_date(self): - return self.report_schedule_form_start_date() + def get_report_schedule_form_start_date_time_recurrence(self): + return self.report_schedule_form_start_date_time_recurrence() def get_report_schedule_form_recurrence_choice(self): return self.report_schedule_form_recurrence_choice(self.request.POST) - def get_report_schedule_form_recurrence(self): - return self.report_schedule_form_recurrence() - def get_report_parent_name_form(self): return self.report_parent_name_form() @@ -121,7 +116,7 @@ def get_task_details(self, task_id: str) -> Task | None: except SchedulerTaskNotFound: raise Http404() - def create_report_schedule(self, report_recipe: ReportRecipe, deadline_at: str) -> ScheduleResponse | None: + def create_report_schedule(self, report_recipe: ReportRecipe, deadline_at: datetime) -> ScheduleResponse | None: try: report_task = ReportTask( organisation_id=self.organization.code, report_recipe_id=str(report_recipe.recipe_id) @@ -131,7 +126,7 @@ def create_report_schedule(self, report_recipe: ReportRecipe, deadline_at: str) scheduler_id=self.scheduler_id, data=report_task, schedule=report_recipe.cron_expression, - deadline_at=deadline_at, + deadline_at=str(deadline_at), ) submit_schedule = self.scheduler_client.post_schedule(schedule=schedule_request) @@ -269,28 +264,31 @@ def run_boefje_for_oois(self, boefje: Boefje, oois: list[OOI]) -> None: except SchedulerError as error: messages.error(self.request, error.message) - def convert_recurrence_to_cron_expressions(self, recurrence: str) -> str: + def convert_recurrence_to_cron_expressions(self, recurrence: str, start_date_time: datetime) -> 
str: """ - Because there is no time defined for the start date, we use midnight 00:00 for all expressions. + The user defines the start date and time. """ - start_date = datetime.now(tz=timezone.utc).date() # for now, not set by user - - if start_date and recurrence: - day = start_date.day - month = start_date.month - week = start_date.strftime("%w").upper() # ex. 4 + if start_date_time and recurrence: + day = start_date_time.day + month = start_date_time.month + week = start_date_time.strftime("%w").upper() # ex. 4 + hour = start_date_time.hour + minute = start_date_time.minute cron_expr = { - "daily": "0 0 * * *", # Recurres every day at 00:00 - "weekly": f"0 0 * * {week}", # Recurres every week on the {week} at 00:00 - "yearly": f"0 0 {day} {month} *", # Recurres every year on the {day} of the {month} at 00:00 + "daily": f"{minute} {hour} * * *", # Recurres every day at the selected time + "weekly": f"{minute} {hour} * * {week}", # Recurres every week on the {week} at the selected time + "yearly": f"{minute} {hour} {day} {month} *", + # Recurres every year on the {day} of the {month} at the selected time } - if 28 <= day <= 31: - cron_expr["monthly"] = "0 0 28-31 * *" + if day >= 28: + cron_expr["monthly"] = f"{minute} {hour} L * *" else: - cron_expr["monthly"] = f"0 0 {day} * *" # Recurres on the exact {day} of the month at 00:00 + cron_expr["monthly"] = ( + f"{minute} {hour} {day} * *" # Recurres on the exact {day} of the month at the selected time + ) return cron_expr.get(recurrence, "") return "" diff --git a/rocky/tests/conftest.py b/rocky/tests/conftest.py index f73c57d106b..cde70e1abbf 100644 --- a/rocky/tests/conftest.py +++ b/rocky/tests/conftest.py @@ -2340,3 +2340,217 @@ def get_report_input_data_from_bytes(): } } return json.dumps(input_data).encode("utf-8") + + +@pytest.fixture +def aggregate_report_with_sub_reports(): + aggregate_report: Paginated[tuple[Report, list[Report | None]]] = Paginated( + count=1, + items=[ + ( + Report( + 
object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|23820a64-db8f-41b7-b045-031338fbb91d", + name="Aggregate Report", + report_type="aggregate-organisation-report", + template="aggregate_organisation_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("23820a64-db8f-41b7-b045-031338fbb91d"), + organization_code="_rieven", + organization_name="Rieven", + organization_tags=[], + data_raw_id="3a362cd7-6348-4e91-8a6f-4cd83f9f6a83", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + parent_report=None, + report_recipe=None, + has_parent=False, + ), + [ + Report( + object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|28c7b15e-6dda-49e8-a101-41df3124287e", + name="Mail Report", + report_type="mail-report", + template="mail_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("28c7b15e-6dda-49e8-a101-41df3124287e"), + organization_code="_rieven", + organization_name="Rieven", + organization_tags=[], + data_raw_id="a534b4d5-5dba-4ddc-9b77-970675ae4b1c", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + parent_report=Reference("Report|23820a64-db8f-41b7-b045-031338fbb91d"), + report_recipe=None, + has_parent=True, + ), + Report( + object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|2a56737f-492f-424b-88cc-0029ce2a444b", + name="IPv6 Report", + report_type="ipv6-report", + template="ipv6_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("2a56737f-492f-424b-88cc-0029ce2a444b"), + organization_code="_rieven", + organization_name="Rieven", + organization_tags=[], + data_raw_id="0bdea8eb-7ac0-46ef-ad14-ea3b0bfe1030", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + 
parent_report=Reference("Report|23820a64-db8f-41b7-b045-031338fbb91d"), + report_recipe=None, + has_parent=True, + ), + Report( + object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|4ec12350-7552-40de-8c9f-f75ac04b99cb", + name="RPKI Report", + report_type="rpki-report", + template="rpki_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("4ec12350-7552-40de-8c9f-f75ac04b99cb"), + organization_code="_rieven", + organization_name="Rieven", + organization_tags=[], + data_raw_id="53d5452c-9e67-42d2-9cb0-3b684d8967a2", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + parent_report=Reference("Report|23820a64-db8f-41b7-b045-031338fbb91d"), + report_recipe=None, + has_parent=True, + ), + Report( + object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|8137a050-f897-45ce-a695-fd21c63e2e5c", + name="Safe Connections Report", + report_type="safe-connections-report", + template="safe_connections_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("8137a050-f897-45ce-a695-fd21c63e2e5c"), + organization_code="_rieven", + organization_name="Rieven", + organization_tags=[], + data_raw_id="a218ca79-47de-4473-a93d-54d14baadd98", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + parent_report=Reference("Report|23820a64-db8f-41b7-b045-031338fbb91d"), + report_recipe=None, + has_parent=True, + ), + Report( + object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|9ca7ad01-e19e-42c9-9361-751db4399b94", + name="Web System Report", + report_type="web-system-report", + template="web_system_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("9ca7ad01-e19e-42c9-9361-751db4399b94"), + organization_code="_rieven", + 
organization_name="Rieven", + organization_tags=[], + data_raw_id="3779f5b0-3adf-41c8-9630-8eed8a857ae6", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + parent_report=Reference("Report|23820a64-db8f-41b7-b045-031338fbb91d"), + report_recipe=None, + has_parent=True, + ), + Report( + object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|a76878ba-55e0-4971-b645-63cfdfd34e78", + name="Open Ports Report", + report_type="open-ports-report", + template="open_ports_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("a76878ba-55e0-4971-b645-63cfdfd34e78"), + organization_code="_rieven", + organization_name="Rieven", + organization_tags=[], + data_raw_id="851feeab-7036-48f6-81ef-599467c52457", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + parent_report=Reference("Report|23820a64-db8f-41b7-b045-031338fbb91d"), + report_recipe=None, + has_parent=True, + ), + Report( + object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|ad33bbf1-bd35-4cb4-a61d-ebe1409e2f67", + name="Vulnerability Report", + report_type="vulnerability-report", + template="vulnerability_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("ad33bbf1-bd35-4cb4-a61d-ebe1409e2f67"), + organization_code="_rieven", + organization_name="Rieven", + organization_tags=[], + data_raw_id="1e259fce-3cd7-436f-b233-b4ae24a8f11b", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + parent_report=Reference("Report|23820a64-db8f-41b7-b045-031338fbb91d"), + report_recipe=None, + has_parent=True, + ), + Report( + object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|bd26a0c0-92c2-4323-977d-a10bd90619e7", + name="System Report", + report_type="systems-report", + template="systems_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 
7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("bd26a0c0-92c2-4323-977d-a10bd90619e7"), + organization_code="_rieven", + organization_name="Rieven", + organization_tags=[], + data_raw_id="50a9e4df-3b69-4ad8-b798-df626162db5a", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + parent_report=Reference("Report|23820a64-db8f-41b7-b045-031338fbb91d"), + report_recipe=None, + has_parent=True, + ), + Report( + object_type="Report", + scan_profile=None, + user_id=None, + primary_key="Report|d8fcaa8f-65ca-4304-a18c-078767b37bcb", + name="Name Server Report", + report_type="name-server-report", + template="name_server_report/report.html", + date_generated=datetime(2024, 11, 21, 10, 7, 7, 441137), + input_oois=["Hostname|internet|mispo.es"], + report_id=UUID("d8fcaa8f-65ca-4304-a18c-078767b37bcb"), + organization_code="_rieven", + organization_name="Rieven", + organization_tags=[], + data_raw_id="5faa3364-c8b2-4b9c-8cc8-99d8f19ccf8a", + observed_at=datetime(2024, 11, 21, 10, 7, 7, 441043), + parent_report=Reference("Report|23820a64-db8f-41b7-b045-031338fbb91d"), + report_recipe=None, + has_parent=True, + ), + ], + ) + ], + ) + return aggregate_report diff --git a/rocky/tests/reports/test_aggregate_report_flow.py b/rocky/tests/reports/test_aggregate_report_flow.py index 49b4e67caa5..bd2052ac1a8 100644 --- a/rocky/tests/reports/test_aggregate_report_flow.py +++ b/rocky/tests/reports/test_aggregate_report_flow.py @@ -271,6 +271,7 @@ def test_save_aggregate_report_view_scheduled( "report_type": ["systems-report", "vulnerability-report"], "choose_recurrence": "repeat", "start_date": "2024-01-01", + "start_time": "10:10", "recurrence": "weekly", "parent_report_name": ["Scheduled Aggregate Report %x"], }, diff --git a/rocky/tests/reports/test_generate_report_flow.py b/rocky/tests/reports/test_generate_report_flow.py index 34ae22a846a..176746a88bc 100644 --- a/rocky/tests/reports/test_generate_report_flow.py +++ 
b/rocky/tests/reports/test_generate_report_flow.py @@ -258,6 +258,7 @@ def test_save_generate_report_view_scheduled( "report_type": "dns-report", "choose_recurrence": "repeat", "start_date": "2024-01-01", + "start_time": "10:10", "recurrence": "daily", "parent_report_name": [f"DNS report for {len(listed_hostnames)} objects"], }, diff --git a/rocky/tests/reports/test_report_overview.py b/rocky/tests/reports/test_report_overview.py index c4fc478c15e..ba737aaa017 100644 --- a/rocky/tests/reports/test_report_overview.py +++ b/rocky/tests/reports/test_report_overview.py @@ -134,3 +134,35 @@ def test_report_overview_rerun_reports( assert list(request._messages)[0].message == "Rerun successful" assertContains(response, concatenated_report.name) + + +def test_aggregate_report_has_sub_reports( + rf, client_member, mock_organization_view_octopoes, mock_bytes_client, aggregate_report_with_sub_reports +): + mock_organization_view_octopoes().list_reports.return_value = aggregate_report_with_sub_reports + + aggregate_report, subreports = aggregate_report_with_sub_reports.items[0] + + response = ReportHistoryView.as_view()( + setup_request(rf.get("report_history"), client_member.user), organization_code=client_member.organization.code + ) + + assert response.status_code == 200 + + assertContains(response, "Nov. 21, 2024") + assertContains(response, "Nov. 21, 2024, 10:07 a.m.") + + assertContains(response, "expando-button icon ti-chevron-down") + + assertContains( + response, f"This report consist of {len(subreports)} subreports with the following report types and objects." + ) + + assertContains(response, f"Subreports (5/{len(subreports)})", html=True) + + assertContains(response, aggregate_report.name) + + for subreport in subreports: + assertContains(response, subreport.name) + + assertContains(response, "View all subreports")