From 7a1ea84acfbe6de08329bd9087a477ff02112319 Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 5 Feb 2024 23:04:46 +0000 Subject: [PATCH 01/34] Update versions in application files --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/components/package.json b/components/package.json index 6cb8985b650..9a57f7b78dd 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "3.31.0", + "version": "2.32.0-dev", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index 03977b720f8..f1c39c15ed1 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa -__version__ = '3.31.0' +__version__ = '2.32.0-dev' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 50342639af2..53bce7bc759 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "3.31.0" +appVersion: "2.32.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.108 +version: 1.6.109-dev icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap From 738dca4534382049ca7587a60fb6b33715213943 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 5 Feb 2024 17:57:51 -0600 Subject: [PATCH 02/34] Update versions --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/components/package.json b/components/package.json index 6cb8985b650..6bff77e4dc6 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "3.31.0", + "version": "2.31.0", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index 03977b720f8..c79303a1f28 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa -__version__ = '3.31.0' +__version__ = '2.31.0' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 50342639af2..26edb33e5ef 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: "3.31.0" +appVersion: "2.31.0" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo version: 1.6.108 From 74665d7d3e83c5eedab054737e1a2578a9355d5b Mon Sep 17 00:00:00 2001 From: Colm O hEigeartaigh Date: Tue, 6 Feb 2024 23:01:07 +0000 Subject: [PATCH 03/34] Parse GitHub vulnerability version (#9462) --- dojo/tools/github_vulnerability/parser.py | 3 + .../github-vuln-version.json | 106 ++++++++++++++++++ .../tools/test_github_vulnerability_parser.py | 15 +++ 3 files changed, 124 insertions(+) create mode 100644 unittests/scans/github_vulnerability/github-vuln-version.json diff --git a/dojo/tools/github_vulnerability/parser.py b/dojo/tools/github_vulnerability/parser.py index 15bf37606c9..3c134342d20 100644 --- a/dojo/tools/github_vulnerability/parser.py +++ b/dojo/tools/github_vulnerability/parser.py @@ -66,6 +66,9 @@ def get_findings(self, filename, test): if "vulnerableManifestPath" in alert: finding.file_path = alert["vulnerableManifestPath"] + if "vulnerableRequirements" in alert and alert["vulnerableRequirements"].startswith("= "): + finding.component_version = alert["vulnerableRequirements"][2:] + if "createdAt" in alert: finding.date = dateutil.parser.parse(alert["createdAt"]) diff --git a/unittests/scans/github_vulnerability/github-vuln-version.json b/unittests/scans/github_vulnerability/github-vuln-version.json new file mode 100644 index 00000000000..e80afe7e583 --- /dev/null +++ b/unittests/scans/github_vulnerability/github-vuln-version.json @@ -0,0 +1,106 @@ +{ + "data": { + "repository": { + "vulnerabilityAlerts": { + "nodes": [ + { + "id": "RVA_kwDOLJyUo88AAAABQUWapw", + "createdAt": "2024-01-26T02:42:32Z", + "vulnerableManifestPath": "sompath/pom.xml", + "securityVulnerability": { + "severity": "CRITICAL", + "updatedAt": "2022-12-09T22:02:22Z", + "package": { + "name": "org.springframework:spring-web", + "ecosystem": "MAVEN" + }, + "firstPatchedVersion": { + "identifier": "6.0.0" + }, + "vulnerableVersionRange": "< 6.0.0", + "advisory": { + "description": "Pivotal Spring Framework before 6.0.0 suffers from a potential remote code execution (RCE) issue if used for Java deserialization of untrusted data. Depending on how the library is implemented within a product, this issue may or not occur, and authentication may be required.\n\nMaintainers recommend investigating alternative components or a potential mitigating control. 
Version 4.2.6 and 3.2.17 contain [enhanced documentation](https://github.com/spring-projects/spring-framework/commit/5cbe90b2cd91b866a5a9586e460f311860e11cfa) advising users to take precautions against unsafe Java deserialization, version 5.3.0 [deprecate the impacted classes](https://github.com/spring-projects/spring-framework/issues/25379) and version 6.0.0 [removed it entirely](https://github.com/spring-projects/spring-framework/issues/27422).", + "summary": "Pivotal Spring Framework contains unsafe Java deserialization methods", + "identifiers": [ + { + "value": "GHSA-4wrc-f8pq-fpqp", + "type": "GHSA" + }, + { + "value": "CVE-2016-1000027", + "type": "CVE" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-1000027" + }, + { + "url": "https://bugzilla.redhat.com/show_bug.cgi?id=CVE-2016-1000027" + }, + { + "url": "https://security-tracker.debian.org/tracker/CVE-2016-1000027" + }, + { + "url": "https://www.tenable.com/security/research/tra-2016-20" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-1231625331" + }, + { + "url": "https://github.com/spring-projects/spring-framework/commit/5cbe90b2cd91b866a5a9586e460f311860e11cfa" + }, + { + "url": "https://support.contrastsecurity.com/hc/en-us/articles/4402400830612-Spring-web-Java-Deserialization-CVE-2016-1000027" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/21680" + }, + { + "url": "https://github.com/spring-projects/spring-framework/commit/2b051b8b321768a4cfef83077db65c6328ffd60f" + }, + { + "url": "https://jira.spring.io/browse/SPR-17143?redirect=false" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-579669626" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-582313417" + }, + { + "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-744519525" + }, + { + "url": "https://security.netapp.com/advisory/ntap-20230420-0009/" + }, + { + "url": "https://spring.io/blog/2022/05/11/spring-framework-5-3-20-and-5-2-22-available-now" + }, + { + "url": "https://github.com/advisories/GHSA-4wrc-f8pq-fpqp" + } + ], + "cvss": { + "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" + } + } + }, + "state": "OPEN", + "vulnerableManifestFilename": "pom.xml", + "vulnerableRequirements": "= 5.3.29", + "number": 1, + "dependencyScope": "RUNTIME", + "dismissComment": null, + "dismissReason": null, + "dismissedAt": null, + "fixedAt": null + } + ] + } + } + } +} diff --git a/unittests/tools/test_github_vulnerability_parser.py b/unittests/tools/test_github_vulnerability_parser.py index acc955e3492..1453c02a39b 100644 --- a/unittests/tools/test_github_vulnerability_parser.py +++ b/unittests/tools/test_github_vulnerability_parser.py @@ -251,3 +251,18 @@ def test_parse_state(self): self.assertEqual(finding.file_path, "apache/cxf/cxf-shiro/pom.xml") self.assertEqual(finding.active, False) self.assertEqual(finding.is_mitigated, True) + + def test_parser_version(self): + testfile = open("unittests/scans/github_vulnerability/github-vuln-version.json") + parser = GithubVulnerabilityParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(1, len(findings)) + for finding in findings: + finding.clean() + + with self.subTest(i=0): + finding = findings[0] + self.assertEqual(finding.title, "Pivotal Spring Framework contains 
unsafe Java deserialization methods") + self.assertEqual(finding.severity, "Critical") + self.assertEqual(finding.component_name, "org.springframework:spring-web") + self.assertEqual(finding.component_version, "5.3.29") From 983d7eef24b001c10ea2162413be2f19061ccd58 Mon Sep 17 00:00:00 2001 From: Andrei Serebriakov Date: Wed, 7 Feb 2024 02:06:47 +0300 Subject: [PATCH 04/34] Fix SARIF parser with CodeQL rules (#9440) * fix for sarif parser with codeql rules * add check for extensions property * flake8 comparsion --- dojo/tools/sarif/parser.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dojo/tools/sarif/parser.py b/dojo/tools/sarif/parser.py index 14d81849570..e7963612b44 100644 --- a/dojo/tools/sarif/parser.py +++ b/dojo/tools/sarif/parser.py @@ -77,7 +77,10 @@ def __get_last_invocation_date(self, data): def get_rules(run): rules = {} - for item in run["tool"]["driver"].get("rules", []): + rules_array = run["tool"]["driver"].get("rules", []) + if len(rules_array) == 0 and run["tool"].get("extensions") is not None: + rules_array = run["tool"]["extensions"][0].get("rules", []) + for item in rules_array: rules[item["id"]] = item return rules From 57bd0566ce08687e42496af3fd4391b2715db924 Mon Sep 17 00:00:00 2001 From: Blake Owens <76979297+blakeaowens@users.noreply.github.com> Date: Fri, 9 Feb 2024 14:34:06 -0600 Subject: [PATCH 05/34] finding sla expiration date field (part two) (#9494) * finding sla expiration date field (part two) * sla violation check updates * clean up of finding violates_sla property * flake8 fix * Update dojo/models.py Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> * Update 0201_populate_finding_sla_expiration_date.py --------- Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> --- ...01_populate_finding_sla_expiration_date.py | 133 ++++++++++++++++++ dojo/filters.py | 17 +-- dojo/models.py | 41 +++--- 3 files changed, 160 insertions(+), 31 deletions(-) create mode 100644 dojo/db_migrations/0201_populate_finding_sla_expiration_date.py diff --git a/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py b/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py new file mode 100644 index 00000000000..4b886301de7 --- /dev/null +++ b/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py @@ -0,0 +1,133 @@ +from django.db import migrations +from django.utils import timezone +from datetime import datetime +from django.conf import settings +from dateutil.relativedelta import relativedelta +import logging + +from dojo.utils import get_work_days + +logger = logging.getLogger(__name__) + + +def calculate_sla_expiration_dates(apps, schema_editor): + System_Settings = apps.get_model('dojo', 'System_Settings') + + ss, _ = System_Settings.objects.get_or_create() + if not ss.enable_finding_sla: + return + + logger.info('Calculating SLA expiration dates for all findings') + + SLA_Configuration = apps.get_model('dojo', 'SLA_Configuration') + Finding = apps.get_model('dojo', 'Finding') + + findings = Finding.objects.filter(sla_expiration_date__isnull=True).order_by('id').only('id', 'sla_start_date', 'date', 'severity', 'test', 'mitigated') + + page_size = 1000 + total_count = Finding.objects.filter(id__gt=0).count() + logger.info('Found %d findings to be updated', total_count) + + i = 0 + batch = [] + last_id = 0 + total_pages = (total_count // page_size) + 2 + for p in range(1, total_pages): + page = findings.filter(id__gt=last_id)[:page_size] + for find in page: + i += 1 + last_id = find.id + 
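            # The block below rebuilds this finding's SLA clock: age is counted
            # from its SLA start (or discovery) date until mitigation or today,
            # using business days when settings.SLA_BUSINESS_DAYS is set; the
            # expiration date is then the mitigation date (or today) plus the
            # days still remaining in the severity's SLA period.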
+ start_date = find.sla_start_date if find.sla_start_date else find.date + + sla_config = SLA_Configuration.objects.filter(id=find.test.engagement.product.sla_configuration_id).first() + sla_period = getattr(sla_config, find.severity.lower(), None) + + days = None + if settings.SLA_BUSINESS_DAYS: + if find.mitigated: + days = get_work_days(find.date, find.mitigated.date()) + else: + days = get_work_days(find.date, timezone.now().date()) + else: + if isinstance(start_date, datetime): + start_date = start_date.date() + + if find.mitigated: + days = (find.mitigated.date() - start_date).days + else: + days = (timezone.now().date() - start_date).days + + days = days if days > 0 else 0 + + days_remaining = None + if sla_period: + days_remaining = sla_period - days + + if days_remaining: + if find.mitigated: + find.sla_expiration_date = find.mitigated.date() + relativedelta(days=days_remaining) + else: + find.sla_expiration_date = timezone.now().date() + relativedelta(days=days_remaining) + + batch.append(find) + + if (i > 0 and i % page_size == 0): + Finding.objects.bulk_update(batch, ['sla_expiration_date']) + batch = [] + logger.info('%s out of %s findings processed...', i, total_count) + + Finding.objects.bulk_update(batch, ['sla_expiration_date']) + batch = [] + logger.info('%s out of %s findings processed...', i, total_count) + + +def reset_sla_expiration_dates(apps, schema_editor): + System_Settings = apps.get_model('dojo', 'System_Settings') + + ss, _ = System_Settings.objects.get_or_create() + if not ss.enable_finding_sla: + return + + logger.info('Resetting SLA expiration dates for all findings') + + Finding = apps.get_model('dojo', 'Finding') + + findings = Finding.objects.filter(sla_expiration_date__isnull=False).order_by('id').only('id') + + page_size = 1000 + total_count = Finding.objects.filter(id__gt=0).count() + logger.info('Found %d findings to be reset', total_count) + + i = 0 + batch = [] + last_id = 0 + total_pages = (total_count // page_size) + 2 + for p in range(1, total_pages): + page = findings.filter(id__gt=last_id)[:page_size] + for find in page: + i += 1 + last_id = find.id + + find.sla_expiration_date = None + batch.append(find) + + if (i > 0 and i % page_size == 0): + Finding.objects.bulk_update(batch, ['sla_expiration_date']) + batch = [] + logger.info('%s out of %s findings processed...', i, total_count) + + Finding.objects.bulk_update(batch, ['sla_expiration_date']) + batch = [] + logger.info('%s out of %s findings processed...', i, total_count) + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0200_finding_sla_expiration_date_product_async_updating_and_more'), + ] + + operations = [ + migrations.RunPython(calculate_sla_expiration_dates, reset_sla_expiration_dates), + ] diff --git a/dojo/filters.py b/dojo/filters.py index 51279d76a9a..723c52337f3 100644 --- a/dojo/filters.py +++ b/dojo/filters.py @@ -11,6 +11,7 @@ from django.conf import settings import six from django.utils.translation import gettext_lazy as _ +from django.utils import timezone from django_filters import FilterSet, CharFilter, OrderingFilter, \ ModelMultipleChoiceFilter, ModelChoiceFilter, MultipleChoiceFilter, \ BooleanFilter, NumberFilter, DateFilter @@ -148,16 +149,12 @@ def any(self, qs, name): return qs def sla_satisfied(self, qs, name): - for finding in qs: - if finding.violates_sla: - qs = qs.exclude(id=finding.id) - return qs + # return findings that have an sla expiration date after today or no sla expiration date + return 
qs.filter(Q(sla_expiration_date__isnull=True) | Q(sla_expiration_date__gt=timezone.now().date()))

     def sla_violated(self, qs, name):
-        for finding in qs:
-            if not finding.violates_sla:
-                qs = qs.exclude(id=finding.id)
-        return qs
+        # return active findings that have an sla expiration date before today
+        return qs.filter(Q(active=True) & Q(sla_expiration_date__lt=timezone.now().date()))

     options = {
         None: (_('Any'), any),
@@ -184,13 +181,13 @@ def any(self, qs, name):

     def sla_satisifed(self, qs, name):
         for product in qs:
-            if product.violates_sla:
+            if product.violates_sla():
                 qs = qs.exclude(id=product.id)
         return qs

     def sla_violated(self, qs, name):
         for product in qs:
-            if not product.violates_sla:
+            if not product.violates_sla():
                 qs = qs.exclude(id=product.id)
         return qs

diff --git a/dojo/models.py b/dojo/models.py
index 7bda3997c0c..45d522963ee 100755
--- a/dojo/models.py
+++ b/dojo/models.py
@@ -1102,7 +1102,7 @@ def findings_active_verified_count(self):
     @cached_property
     def endpoint_host_count(self):
         # active_endpoints is (should be) prefetched
-        endpoints = self.active_endpoints
+        endpoints = getattr(self, 'active_endpoints', None)

         hosts = []
         for e in endpoints:
@@ -1116,7 +1116,10 @@ def endpoint_count(self):
     @cached_property
     def endpoint_count(self):
         # active_endpoints is (should be) prefetched
-        return len(self.active_endpoints)
+        endpoints = getattr(self, 'active_endpoints', None)
+        if endpoints:
+            return len(self.active_endpoints)
+        return None

     def open_findings(self, start_date=None, end_date=None):
         if start_date is None or end_date is None:
@@ -1192,13 +1195,11 @@ def get_absolute_url(self):
         from django.urls import reverse
         return reverse('view_product', args=[str(self.id)])

-    @property
     def violates_sla(self):
-        findings = Finding.objects.filter(test__engagement__product=self, active=True)
-        for f in findings:
-            if f.violates_sla:
-                return True
-        return False
+        findings = Finding.objects.filter(test__engagement__product=self,
+                                          active=True,
+                                          sla_expiration_date__lt=timezone.now().date())
+        return findings.count() > 0


 class Product_Member(models.Model):
@@ -2887,20 +2888,19 @@ def set_sla_expiration_date(self):
         self.sla_expiration_date = get_current_date() + relativedelta(days=days_remaining)

     def sla_days_remaining(self):
-        sla_calculation = None
-        sla_period = self.get_sla_period()
-        if sla_period:
-            sla_calculation = sla_period - self.sla_age
-        return sla_calculation
-
-    def sla_deadline(self):
-        days_remaining = self.sla_days_remaining()
-        if days_remaining:
+        if self.sla_expiration_date:
             if self.mitigated:
-                return self.mitigated.date() + relativedelta(days=days_remaining)
-            return get_current_date() + relativedelta(days=days_remaining)
+                mitigated_date = self.mitigated
+                if isinstance(mitigated_date, datetime):
+                    mitigated_date = self.mitigated.date()
+                return (self.sla_expiration_date - mitigated_date).days
+            else:
+                return (self.sla_expiration_date - get_current_date()).days
         return None

+    def sla_deadline(self):
+        return self.sla_expiration_date
+
    def github(self):
        try:
            return self.github_issue
@@ -3294,8 +3294,7 @@ def inherit_tags(self, potentially_existing_tags):

     @property
     def violates_sla(self):
-        days_remaining = self.sla_days_remaining()
-        return days_remaining < 0 if days_remaining else False
+        return (self.sla_expiration_date and self.sla_expiration_date < timezone.now().date())


 class FindingAdmin(admin.ModelAdmin):
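Net effect of the model change above: SLA status becomes a pure comparison against the
persisted sla_expiration_date instead of a per-finding recomputation. A minimal standalone
sketch of the resulting semantics (illustrative names only, not the Django model itself):

    from datetime import date, timedelta

    def violates_sla(sla_expiration_date, today=None):
        # A finding breaches its SLA once the stored expiration date has passed.
        today = today or date.today()
        return sla_expiration_date is not None and sla_expiration_date < today

    # Example: a finding opened 2024-01-01 under a 30-day SLA expires 2024-01-31,
    # so by 2024-02-05 it is in violation.
    print(violates_sla(date(2024, 1, 1) + timedelta(days=30), today=date(2024, 2, 5)))  # True

From 00db247d5c02bc934f7faa39271f2536477b6d1a Mon Sep 17 00:00:00 2001
From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com>
Date: Sun, 11 Feb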
2024 20:42:39 -0600 Subject: [PATCH 06/34] Jira Server/DataCenter: Update meta methods (#9512) --- dojo/jira_link/helper.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 8a8b208d45f..ecd5da084f8 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -1036,28 +1036,28 @@ def get_issuetype_fields( else: try: - issuetypes = jira.createmeta_issuetypes(project_key) + issuetypes = jira.project_issue_types(project_key) except JIRAError as e: e.text = f"Jira API call 'createmeta/issuetypes' failed with status: {e.status_code} and message: {e.text}. Project misconfigured or no permissions in Jira ?" raise e issuetype_id = None - for it in issuetypes['values']: - if it['name'] == issuetype_name: - issuetype_id = it['id'] + for it in issuetypes: + if it.name == issuetype_name: + issuetype_id = it.id break if not issuetype_id: raise JIRAError("Issue type ID can not be matched. Misconfigured default issue type ?") try: - issuetype_fields = jira.createmeta_fieldtypes(project_key, issuetype_id) + issuetype_fields = jira.project_issue_fields(project_key, issuetype_id) except JIRAError as e: e.text = f"Jira API call 'createmeta/fieldtypes' failed with status: {e.status_code} and message: {e.text}. Misconfigured project or default issue type ?" raise e try: - issuetype_fields = [f['fieldId'] for f in issuetype_fields['values']] + issuetype_fields = [f.fieldId for f in issuetype_fields] except Exception: raise JIRAError("Misconfigured default issue type ?") From 164c09c4c778792013dd450f0fb73b0bab368145 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Sun, 11 Feb 2024 20:43:49 -0600 Subject: [PATCH 07/34] Jira Webhook: Catch comments from other issue updates (#9513) * Jira Webhook: Catch comments from other issue updates * Accommodate redirect responses * Update dojo/jira_link/views.py Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> * Fix syntax --------- Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> --- dojo/jira_link/views.py | 203 ++++++++++++++++++++++------------------ 1 file changed, 113 insertions(+), 90 deletions(-) diff --git a/dojo/jira_link/views.py b/dojo/jira_link/views.py index e05ea5ce219..a1a73f0b015 100644 --- a/dojo/jira_link/views.py +++ b/dojo/jira_link/views.py @@ -1,7 +1,7 @@ # Standard library imports import json import logging - +import datetime # Third party imports from django.contrib import messages from django.contrib.admin.utils import NestedObjects @@ -105,97 +105,13 @@ def webhook(request, secret=None): if findings: for finding in findings: jira_helper.process_resolution_from_jira(finding, resolution_id, resolution_name, assignee_name, jira_now, jissue) + # Check for any comment that could have come along with the resolution + if (error_response := check_for_and_create_comment(parsed)) is not None: + return error_response if parsed.get('webhookEvent') == 'comment_created': - """ - example incoming requests from JIRA Server 8.14.0 - { - "timestamp":1610269967824, - "webhookEvent":"comment_created", - "comment":{ - "self":"https://jira.host.com/rest/api/2/issue/115254/comment/466578", - "id":"466578", - "author":{ - "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo", - "name":"defect.dojo", - "key":"defect.dojo", # seems to be only present on JIRA Server, not on Cloud - "avatarUrls":{ - 
"48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48", - "24x24":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24", - "16x16":"https://www.gravatar.com/avatar9637bfb970eff6176357df615f548f1c?d=mm&s=16", - "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32" - }, - "displayName":"Defect Dojo", - "active":true, - "timeZone":"Europe/Amsterdam" - }, - "body":"(Valentijn Scholten):test4", - "updateAuthor":{ - "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo", - "name":"defect.dojo", - "key":"defect.dojo", - "avatarUrls":{ - "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48", - "24x24""https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24", - "16x16":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=16", - "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32" - }, - "displayName":"Defect Dojo", - "active":true, - "timeZone":"Europe/Amsterdam" - }, - "created":"2021-01-10T10:12:47.824+0100", - "updated":"2021-01-10T10:12:47.824+0100" - } - } - """ - - comment_text = parsed['comment']['body'] - commentor = '' - if 'name' in parsed['comment']['updateAuthor']: - commentor = parsed['comment']['updateAuthor']['name'] - elif 'emailAddress' in parsed['comment']['updateAuthor']: - commentor = parsed['comment']['updateAuthor']['emailAddress'] - else: - logger.debug('Could not find the author of this jira comment!') - commentor_display_name = parsed['comment']['updateAuthor']['displayName'] - # example: body['comment']['self'] = "http://www.testjira.com/jira_under_a_path/rest/api/2/issue/666/comment/456843" - jid = parsed['comment']['self'].split('/')[-3] - jissue = get_object_or_404(JIRA_Issue, jira_id=jid) - logging.info(f"Received issue comment for {jissue.jira_key}") - logger.debug('jissue: %s', vars(jissue)) - - jira_usernames = JIRA_Instance.objects.values_list('username', flat=True) - for jira_userid in jira_usernames: - # logger.debug('incoming username: %s jira config username: %s', commentor.lower(), jira_userid.lower()) - if jira_userid.lower() == commentor.lower(): - logger.debug('skipping incoming JIRA comment as the user id of the comment in JIRA (%s) matches the JIRA username in DefectDojo (%s)', commentor.lower(), jira_userid.lower()) - return HttpResponse('') - - findings = None - if jissue.finding: - findings = [jissue.finding] - create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding", args=(jissue.finding.id,)), icon='check') - - elif jissue.finding_group: - findings = [jissue.finding_group.findings.all()] - create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding_group", args=(jissue.finding_group.id,)), icon='check') - - elif jissue.engagement: - return HttpResponse('Comment for engagement ignored') - else: - raise Http404(f'No finding or engagement found for JIRA issue {jissue.jira_key}') - - for finding in findings: - # logger.debug('finding: %s', vars(jissue.finding)) - new_note = Notes() - new_note.entry = f'({commentor_display_name} ({commentor})): {comment_text}' - new_note.author, created = User.objects.get_or_create(username='JIRA') - new_note.save() - finding.notes.add(new_note) - finding.jira_issue.jira_change = timezone.now() - finding.jira_issue.save() - finding.save() + if (error_response := 
check_for_and_create_comment(parsed)) is not None: + return error_response if parsed.get('webhookEvent') not in ['comment_created', 'jira:issue_updated']: logger.info(f"Unrecognized JIRA webhook event received: {parsed.get('webhookEvent')}") @@ -203,6 +119,7 @@ def webhook(request, secret=None): except Exception as e: if isinstance(e, Http404): logger.warning('404 error processing JIRA webhook') + logger.warning(str(e)) else: logger.exception(e) @@ -218,6 +135,112 @@ def webhook(request, secret=None): return HttpResponse('') +def check_for_and_create_comment(parsed_json): + """ + example incoming requests from JIRA Server 8.14.0 + { + "timestamp":1610269967824, + "webhookEvent":"comment_created", + "comment":{ + "self":"https://jira.host.com/rest/api/2/issue/115254/comment/466578", + "id":"466578", + "author":{ + "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo", + "name":"defect.dojo", + "key":"defect.dojo", # seems to be only present on JIRA Server, not on Cloud + "avatarUrls":{ + "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48", + "24x24":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24", + "16x16":"https://www.gravatar.com/avatar9637bfb970eff6176357df615f548f1c?d=mm&s=16", + "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32" + }, + "displayName":"Defect Dojo", + "active":true, + "timeZone":"Europe/Amsterdam" + }, + "body":"(Valentijn Scholten):test4", + "updateAuthor":{ + "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo", + "name":"defect.dojo", + "key":"defect.dojo", + "avatarUrls":{ + "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48", + "24x24""https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24", + "16x16":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=16", + "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32" + }, + "displayName":"Defect Dojo", + "active":true, + "timeZone":"Europe/Amsterdam" + }, + "created":"2021-01-10T10:12:47.824+0100", + "updated":"2021-01-10T10:12:47.824+0100" + } + } + """ + comment = parsed_json.get("comment", None) + if comment is None: + return + + comment_text = comment.get('body') + commenter = '' + if 'name' in comment.get('updateAuthor'): + commenter = comment.get('updateAuthor', {}).get('name') + elif 'emailAddress' in comment.get('updateAuthor'): + commenter = comment.get('updateAuthor', {}).get('emailAddress') + else: + logger.debug('Could not find the author of this jira comment!') + commenter_display_name = comment.get('updateAuthor', {}).get('displayName') + # example: body['comment']['self'] = "http://www.testjira.com/jira_under_a_path/rest/api/2/issue/666/comment/456843" + jid = comment.get('self', '').split('/')[-3] + jissue = get_object_or_404(JIRA_Issue, jira_id=jid) + logging.info(f"Received issue comment for {jissue.jira_key}") + logger.debug('jissue: %s', vars(jissue)) + + jira_usernames = JIRA_Instance.objects.values_list('username', flat=True) + for jira_user_id in jira_usernames: + # logger.debug('incoming username: %s jira config username: %s', commenter.lower(), jira_user_id.lower()) + if jira_user_id.lower() == commenter.lower(): + logger.debug('skipping incoming JIRA comment as the user id of the comment in JIRA (%s) matches the JIRA username in DefectDojo (%s)', commenter.lower(), jira_user_id.lower()) + return HttpResponse('') + + findings = None + if jissue.finding: + findings = 
[jissue.finding] + create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding", args=(jissue.finding.id,)), icon='check') + + elif jissue.finding_group: + findings = [jissue.finding_group.findings.all()] + create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding_group", args=(jissue.finding_group.id,)), icon='check') + + elif jissue.engagement: + return HttpResponse('Comment for engagement ignored') + else: + raise Http404(f'No finding or engagement found for JIRA issue {jissue.jira_key}') + + # Set the fields for the notes + author, _ = User.objects.get_or_create(username='JIRA') + entry = f'({commenter_display_name} ({commenter})): {comment_text}' + # Iterate (potentially) over each of the findings the note should be added to + for finding in findings: + # Determine if this exact note was created within the last 30 seconds to avoid duplicate notes + existing_notes = finding.notes.filter( + entry=entry, + author=author, + date__gte=(timezone.now() - datetime.timedelta(seconds=30)), + ) + # Check the query for any hits + if existing_notes.count() == 0: + new_note = Notes() + new_note.entry = entry + new_note.author = author + new_note.save() + finding.notes.add(new_note) + finding.jira_issue.jira_change = timezone.now() + finding.jira_issue.save() + finding.save() + + def get_custom_field(jira, label): url = jira._options["server"].strip('/') + '/rest/api/2/field' response = jira._session.get(url).json() From 7124335f213433b9f8cceddd9cd77499b46d71f9 Mon Sep 17 00:00:00 2001 From: tomaszn Date: Mon, 12 Feb 2024 03:45:30 +0100 Subject: [PATCH 08/34] add metrics page: "Product Tag Count" (fixes #9151) (#9152) * add metrics page: "Product Tag Count" It is fully based on "Product Type Count" metrics page. * fixup! add metrics page: "Product Tag Count" * Fix Flake8 * Update views.py --------- Co-authored-by: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> --- docs/content/en/usage/features.md | 3 + dojo/forms.py | 20 +++- dojo/locale/en/LC_MESSAGES/django.po | 4 + dojo/metrics/urls.py | 2 + dojo/metrics/views.py | 164 ++++++++++++++++++++++++++- dojo/templates/base.html | 5 + dojo/templates/dojo/pt_counts.html | 10 +- dojo/utils.py | 10 +- 8 files changed, 205 insertions(+), 13 deletions(-) diff --git a/docs/content/en/usage/features.md b/docs/content/en/usage/features.md index fdd3e19480d..470c009bf71 100644 --- a/docs/content/en/usage/features.md +++ b/docs/content/en/usage/features.md @@ -557,6 +557,9 @@ Product Type Counts ![Product Type Counts](../../images/met_2.png) +Product Tag Counts +: Same as above, but for a group of products sharing a tag. + Simple Metrics : Provides tabular data for all Product Types. 
The data displayed in this view is the total number of S0, S1, S2, S3, S4, Opened This diff --git a/dojo/forms.py b/dojo/forms.py index 558c09ae69d..36b9cd1a908 100755 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -2119,21 +2119,37 @@ def get_years(): return [(now.year, now.year), (now.year - 1, now.year - 1), (now.year - 2, now.year - 2)] -class ProductTypeCountsForm(forms.Form): +class ProductCountsFormBase(forms.Form): month = forms.ChoiceField(choices=list(MONTHS.items()), required=True, error_messages={ 'required': '*'}) year = forms.ChoiceField(choices=get_years, required=True, error_messages={ 'required': '*'}) + + +class ProductTypeCountsForm(ProductCountsFormBase): product_type = forms.ModelChoiceField(required=True, queryset=Product_Type.objects.none(), error_messages={ 'required': '*'}) def __init__(self, *args, **kwargs): - super(ProductTypeCountsForm, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.fields['product_type'].queryset = get_authorized_product_types(Permissions.Product_Type_View) +class ProductTagCountsForm(ProductCountsFormBase): + product_tag = forms.ModelChoiceField(required=True, + queryset=Product.tags.tag_model.objects.none().order_by('name'), + error_messages={ + 'required': '*'}) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + prods = get_authorized_products(Permissions.Product_View) + tags_available_to_user = Product.tags.tag_model.objects.filter(product__in=prods) + self.fields['product_tag'].queryset = tags_available_to_user + + class APIKeyForm(forms.ModelForm): id = forms.IntegerField(required=True, widget=forms.widgets.HiddenInput()) diff --git a/dojo/locale/en/LC_MESSAGES/django.po b/dojo/locale/en/LC_MESSAGES/django.po index dbb9e756559..ab26c8cbdb4 100644 --- a/dojo/locale/en/LC_MESSAGES/django.po +++ b/dojo/locale/en/LC_MESSAGES/django.po @@ -2692,6 +2692,10 @@ msgstr "" msgid "Product Type Counts" msgstr "" +#: dojo/templates/base.html +msgid "Product Tag Counts" +msgstr "" + #: dojo/templates/base.html msgid "Users" msgstr "" diff --git a/dojo/metrics/urls.py b/dojo/metrics/urls.py index 06b0726a56a..7b2683cf6f7 100644 --- a/dojo/metrics/urls.py +++ b/dojo/metrics/urls.py @@ -18,6 +18,8 @@ views.metrics, name='product_type_metrics'), re_path(r'^metrics/product/type/counts$', views.product_type_counts, name='product_type_counts'), + re_path(r'^metrics/product/tag/counts$', + views.product_tag_counts, name='product_tag_counts'), re_path(r'^metrics/engineer$', views.engineer_metrics, name='engineer_metrics'), re_path(r'^metrics/engineer/(?P\d+)$', views.view_engineer, diff --git a/dojo/metrics/views.py b/dojo/metrics/views.py index e00cbcb857a..4d9236fb58a 100644 --- a/dojo/metrics/views.py +++ b/dojo/metrics/views.py @@ -21,7 +21,7 @@ from django.utils import timezone from dojo.filters import MetricsFindingFilter, UserFilter, MetricsEndpointFilter -from dojo.forms import SimpleMetricsForm, ProductTypeCountsForm +from dojo.forms import SimpleMetricsForm, ProductTypeCountsForm, ProductTagCountsForm from dojo.models import Product_Type, Finding, Product, Engagement, Test, \ Risk_Acceptance, Dojo_User, Endpoint_Status from dojo.utils import get_page_items, add_breadcrumb, findings_this_period, opened_in_period, count_findings, \ @@ -586,13 +586,13 @@ def product_type_counts(request): end_date.month, end_date.day, tzinfo=timezone.get_current_timezone()) - oip = opened_in_period(start_date, end_date, pt) + oip = opened_in_period(start_date, end_date, test__engagement__product__prod_type=pt) # trending 
data - 12 months for x in range(12, 0, -1): opened_in_period_list.append( opened_in_period(start_date + relativedelta(months=-x), end_of_month + relativedelta(months=-x), - pt)) + test__engagement__product__prod_type=pt)) opened_in_period_list.append(oip) @@ -697,6 +697,164 @@ def product_type_counts(request): ) +def product_tag_counts(request): + form = ProductTagCountsForm() + opened_in_period_list = [] + oip = None + cip = None + aip = None + all_current_in_pt = None + top_ten = None + pt = None + today = timezone.now() + first_of_month = today.replace(day=1, hour=0, minute=0, second=0, microsecond=0) + mid_month = first_of_month.replace(day=15, hour=23, minute=59, second=59, microsecond=999999) + end_of_month = mid_month.replace(day=monthrange(today.year, today.month)[1], hour=23, minute=59, second=59, + microsecond=999999) + start_date = first_of_month + end_date = end_of_month + + if request.method == 'GET' and 'month' in request.GET and 'year' in request.GET and 'product_tag' in request.GET: + form = ProductTagCountsForm(request.GET) + if form.is_valid(): + prods = get_authorized_products(Permissions.Product_View) + + pt = form.cleaned_data['product_tag'] + month = int(form.cleaned_data['month']) + year = int(form.cleaned_data['year']) + first_of_month = first_of_month.replace(month=month, year=year) + + month_requested = datetime(year, month, 1) + + end_of_month = month_requested.replace(day=monthrange(month_requested.year, month_requested.month)[1], + hour=23, minute=59, second=59, microsecond=999999) + start_date = first_of_month + start_date = datetime(start_date.year, + start_date.month, start_date.day, + tzinfo=timezone.get_current_timezone()) + end_date = end_of_month + end_date = datetime(end_date.year, + end_date.month, end_date.day, + tzinfo=timezone.get_current_timezone()) + + oip = opened_in_period(start_date, end_date, + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods) + + # trending data - 12 months + for x in range(12, 0, -1): + opened_in_period_list.append( + opened_in_period(start_date + relativedelta(months=-x), end_of_month + relativedelta(months=-x), + test__engagement__product__tags__name=pt, test__engagement__product__in=prods)) + + opened_in_period_list.append(oip) + + closed_in_period = Finding.objects.filter(mitigated__date__range=[start_date, end_date], + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=('Critical', 'High', 'Medium', 'Low')).values( + 'numerical_severity').annotate(Count('numerical_severity')).order_by('numerical_severity') + + total_closed_in_period = Finding.objects.filter(mitigated__date__range=[start_date, end_date], + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=( + 'Critical', 'High', 'Medium', 'Low')).aggregate( + total=Sum( + Case(When(severity__in=('Critical', 'High', 'Medium', 'Low'), + then=Value(1)), + output_field=IntegerField())))['total'] + + overall_in_pt = Finding.objects.filter(date__lt=end_date, + verified=True, + false_p=False, + duplicate=False, + out_of_scope=False, + mitigated__isnull=True, + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=('Critical', 'High', 'Medium', 'Low')).values( + 'numerical_severity').annotate(Count('numerical_severity')).order_by('numerical_severity') + + total_overall_in_pt = Finding.objects.filter(date__lte=end_date, + verified=True, + false_p=False, + duplicate=False, + out_of_scope=False, + mitigated__isnull=True, 
+ test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=('Critical', 'High', 'Medium', 'Low')).aggregate( + total=Sum( + Case(When(severity__in=('Critical', 'High', 'Medium', 'Low'), + then=Value(1)), + output_field=IntegerField())))['total'] + + all_current_in_pt = Finding.objects.filter(date__lte=end_date, + verified=True, + false_p=False, + duplicate=False, + out_of_scope=False, + mitigated__isnull=True, + test__engagement__product__tags__name=pt, + test__engagement__product__in=prods, + severity__in=( + 'Critical', 'High', 'Medium', 'Low')).prefetch_related( + 'test__engagement__product', + 'test__engagement__product__prod_type', + 'test__engagement__risk_acceptance', + 'reporter').order_by( + 'numerical_severity') + + top_ten = Product.objects.filter(engagement__test__finding__date__lte=end_date, + engagement__test__finding__verified=True, + engagement__test__finding__false_p=False, + engagement__test__finding__duplicate=False, + engagement__test__finding__out_of_scope=False, + engagement__test__finding__mitigated__isnull=True, + engagement__test__finding__severity__in=( + 'Critical', 'High', 'Medium', 'Low'), + tags__name=pt, engagement__product__in=prods) + top_ten = severity_count(top_ten, 'annotate', 'engagement__test__finding__severity').order_by('-critical', '-high', '-medium', '-low')[:10] + + cip = {'S0': 0, + 'S1': 0, + 'S2': 0, + 'S3': 0, + 'Total': total_closed_in_period} + + aip = {'S0': 0, + 'S1': 0, + 'S2': 0, + 'S3': 0, + 'Total': total_overall_in_pt} + + for o in closed_in_period: + cip[o['numerical_severity']] = o['numerical_severity__count'] + + for o in overall_in_pt: + aip[o['numerical_severity']] = o['numerical_severity__count'] + else: + messages.add_message(request, messages.ERROR, _("Please choose month and year and the Product Tag."), + extra_tags='alert-danger') + + add_breadcrumb(title=_("Bi-Weekly Metrics"), top_level=True, request=request) + + return render(request, + 'dojo/pt_counts.html', + {'form': form, + 'start_date': start_date, + 'end_date': end_date, + 'opened_in_period': oip, + 'trending_opened': opened_in_period_list, + 'closed_in_period': cip, + 'overall_in_pt': aip, + 'all_current_in_pt': all_current_in_pt, + 'top_ten': top_ten, + 'pt': pt} + ) + + def engineer_metrics(request): # only superusers can select other users to view if request.user.is_superuser: diff --git a/dojo/templates/base.html b/dojo/templates/base.html index 8e42e4278a6..f4043d42e3c 100644 --- a/dojo/templates/base.html +++ b/dojo/templates/base.html @@ -407,6 +407,11 @@ {% trans "Product Type Counts" %} +
  • + + {% trans "Product Tag Counts" %} + +
  • {% trans "Simple Metrics" %} diff --git a/dojo/templates/dojo/pt_counts.html b/dojo/templates/dojo/pt_counts.html index 0c8728b42c1..5cfc6a96934 100644 --- a/dojo/templates/dojo/pt_counts.html +++ b/dojo/templates/dojo/pt_counts.html @@ -12,7 +12,7 @@ {% block content %} {{ block.super }} -
    + {{ form.as_p }}
    @@ -20,8 +20,12 @@ {% if pt %}

    {% blocktrans with start_date=start_date.date end_date=end_date.date%}Finding Information For Period of {{ start_date }} - {{ end_date }} {% endblocktrans %}

    -

    {{ pt.name }}

    [ -
    {% trans "View Details" %}] +

    {{ pt.name }}

    + {% if pt|class_name == "Product_Type" %} + [{% trans "View Details" %}] + {% elif pt|class_name == "Tagulous_Product_tags" %} + [{% trans "View Details" %}] + {% endif %}

    {% trans "Total Security Bug Count In Period" %}

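The dojo/utils.py change below generalizes opened_in_period() from a hard-coded
product-type argument to arbitrary ORM filter kwargs, which is what lets the new
Product Tag Counts view reuse the same helper. A rough standalone sketch of the
call-site shape (stand-in function, illustrative filter values):

    from datetime import date

    def opened_in_period(start_date, end_date, **kwargs):
        # Stand-in: in DefectDojo the kwargs are forwarded into
        # Finding.objects.filter(date__range=[start_date, end_date], **kwargs).
        return {"date__range": [start_date, end_date], **kwargs}

    # Product Type Counts keeps its old behaviour via an explicit kwarg:
    print(opened_in_period(date(2024, 2, 1), date(2024, 2, 29),
                           test__engagement__product__prod_type=7))
    # Product Tag Counts passes tag-based filters instead:
    print(opened_in_period(date(2024, 2, 1), date(2024, 2, 29),
                           test__engagement__product__tags__name="billing"))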
    diff --git a/dojo/utils.py b/dojo/utils.py index 135d341e54f..42334262d94 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -1082,7 +1082,7 @@ def get_period_counts(findings, } -def opened_in_period(start_date, end_date, pt): +def opened_in_period(start_date, end_date, **kwargs): start_date = datetime( start_date.year, start_date.month, @@ -1095,7 +1095,7 @@ def opened_in_period(start_date, end_date, pt): tzinfo=timezone.get_current_timezone()) opened_in_period = Finding.objects.filter( date__range=[start_date, end_date], - test__engagement__product__prod_type=pt, + **kwargs, verified=True, false_p=False, duplicate=False, @@ -1107,7 +1107,7 @@ def opened_in_period(start_date, end_date, pt): Count('numerical_severity')).order_by('numerical_severity') total_opened_in_period = Finding.objects.filter( date__range=[start_date, end_date], - test__engagement__product__prod_type=pt, + **kwargs, verified=True, false_p=False, duplicate=False, @@ -1139,7 +1139,7 @@ def opened_in_period(start_date, end_date, pt): 'closed': Finding.objects.filter( mitigated__date__range=[start_date, end_date], - test__engagement__product__prod_type=pt, + **kwargs, severity__in=('Critical', 'High', 'Medium', 'Low')).aggregate( total=Sum( Case( @@ -1155,7 +1155,7 @@ def opened_in_period(start_date, end_date, pt): duplicate=False, out_of_scope=False, mitigated__isnull=True, - test__engagement__product__prod_type=pt, + **kwargs, severity__in=('Critical', 'High', 'Medium', 'Low')).count() } From 19db206c8332f2a3623bc41de6fce423b438c901 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 12 Feb 2024 09:13:08 -0600 Subject: [PATCH 09/34] Release Drafter: Try validating inputs --- .github/workflows/fetch-oas.yml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/workflows/fetch-oas.yml b/.github/workflows/fetch-oas.yml index 44692ddb5cb..0dd32805b58 100644 --- a/.github/workflows/fetch-oas.yml +++ b/.github/workflows/fetch-oas.yml @@ -10,6 +10,9 @@ on: This will override any version calculated by the release-drafter. 
required: true +env: + release_version: ${{ github.event.inputs.version || github.event.inputs.release_number }} + jobs: oas_fetch: name: Fetch OpenAPI Specifications @@ -21,19 +24,19 @@ jobs: - name: Checkout uses: actions/checkout@v4 with: - ref: ${{ github.event.inputs.version }} + ref: release/${{ env.release_version }} - name: Load docker images run: |- - docker pull defectdojo/defectdojo-django:${{ github.event.inputs.version }}-alpine - docker pull defectdojo/defectdojo-nginx:${{ github.event.inputs.version }}-alpine + docker pull defectdojo/defectdojo-django:${{ env.release_version }}-alpine + docker pull defectdojo/defectdojo-nginx:${{ env.release_version }}-alpine docker images - name: Start Dojo run: docker-compose --profile postgres-redis --env-file ./docker/environments/postgres-redis.env up --no-deps -d postgres nginx uwsgi env: - DJANGO_VERSION: ${{ github.event.inputs.version }}-alpine - NGINX_VERSION: ${{ github.event.inputs.version }}-alpine + DJANGO_VERSION: ${{ env.release_version }}-alpine + NGINX_VERSION: ${{ env.release_version }}-alpine - name: Download OpenAPI Specifications run: |- From b1890d5369037ee977e1610faa242b4718e6e806 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 12 Feb 2024 13:28:30 -0600 Subject: [PATCH 10/34] Disallow duplicate tool types (#9530) * Disallow duplicate tool types * Fix Flake8 * Only validate on new creations * Force new name on tool type unit test --- dojo/api_v2/serializers.py | 8 ++++++++ dojo/forms.py | 17 +++++++++++++++++ unittests/test_swagger_schema.py | 3 +++ 3 files changed, 28 insertions(+) diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index 45d2707a6e0..2d126115080 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -1133,6 +1133,14 @@ class Meta: model = Tool_Type fields = "__all__" + def validate(self, data): + if self.context["request"].method == "POST": + name = data.get("name") + # Make sure this will not create a duplicate test type + if Tool_Type.objects.filter(name=name).count() > 0: + raise serializers.ValidationError('A Tool Type with the name already exists') + return data + class RegulationSerializer(serializers.ModelSerializer): class Meta: diff --git a/dojo/forms.py b/dojo/forms.py index 558c09ae69d..27a1fb0c287 100755 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -2388,6 +2388,23 @@ class Meta: model = Tool_Type exclude = ['product'] + def __init__(self, *args, **kwargs): + instance = kwargs.get('instance', None) + self.newly_created = True + if instance is not None: + self.newly_created = instance.pk is None + super().__init__(*args, **kwargs) + + def clean(self): + form_data = self.cleaned_data + if self.newly_created: + name = form_data.get("name") + # Make sure this will not create a duplicate test type + if Tool_Type.objects.filter(name=name).count() > 0: + raise forms.ValidationError('A Tool Type with the name already exists') + + return form_data + class RegulationForm(forms.ModelForm): class Meta: diff --git a/unittests/test_swagger_schema.py b/unittests/test_swagger_schema.py index 9f1316b4d2e..b1263359374 100644 --- a/unittests/test_swagger_schema.py +++ b/unittests/test_swagger_schema.py @@ -785,6 +785,9 @@ def __init__(self, *args, **kwargs): self.viewset = ToolTypesViewSet self.model = Tool_Type self.serializer = ToolTypeSerializer + self.field_transformers = { + "name": lambda v: v + "_new" + } class UserTest(BaseClass.SchemaTest): From eaf9f176ff2961bf76136893a2fab6aa7ccd2125 Mon Sep 17 00:00:00 2001 
From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 12 Feb 2024 13:29:55 -0600 Subject: [PATCH 11/34] Engagement Surveys: Add missing leading slash (#9531) URL redirects were behaving strangely without this leading slash. it seems it was missed when all the others were added --- dojo/templates/dojo/dashboard.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo/templates/dojo/dashboard.html b/dojo/templates/dojo/dashboard.html index 8d3227f9759..8e049086094 100644 --- a/dojo/templates/dojo/dashboard.html +++ b/dojo/templates/dojo/dashboard.html @@ -207,7 +207,7 @@ {% else %} {% trans "View Responses" %} - {% trans "Create Engagement" %} + {% trans "Create Engagement" %} {% endif %} From 5ae08f404cc5462ac3ae274544254d1ccec8a869 Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 12 Feb 2024 19:33:02 +0000 Subject: [PATCH 12/34] Update versions in application files --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/components/package.json b/components/package.json index 9a57f7b78dd..4c9fc573d81 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.32.0-dev", + "version": "2.31.1", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index f1c39c15ed1..174901e835d 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa -__version__ = '2.32.0-dev' +__version__ = '2.31.1' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 53bce7bc759..0af7d7c32b9 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.32.0-dev" +appVersion: "2.31.1" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.109-dev +version: 1.6.109 icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap From 93f449d3c8f45922f9dd1890c5a5cd20fb09ea57 Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 12 Feb 2024 20:12:56 +0000 Subject: [PATCH 13/34] Update versions in application files --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/components/package.json b/components/package.json index 4c9fc573d81..9a57f7b78dd 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.31.1", + "version": "2.32.0-dev", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index 174901e835d..f1c39c15ed1 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app # noqa -__version__ = '2.31.1' +__version__ = '2.32.0-dev' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 0af7d7c32b9..60c20292d0f 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.31.1" +appVersion: "2.32.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.109 +version: 1.6.110-dev icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap From 10ac52dc8f3eba2a6db4f5db84d0881d4298cb2a Mon Sep 17 00:00:00 2001 From: kiblik Date: Mon, 12 Feb 2024 21:22:16 +0000 Subject: [PATCH 14/34] Dojo_Group: Support for "RemoteUser" in model (#9405) * Use correct name references * fix db_mig * Update and rename 0201_alter_dojo_group_social_provider.py to 0202_alter_dojo_group_social_provider.py --------- Co-authored-by: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> --- .../0202_alter_dojo_group_social_provider.py | 18 ++++++++++++++++++ dojo/models.py | 4 +++- dojo/pipeline.py | 2 +- dojo/remote_user.py | 3 ++- unittests/test_remote_user.py | 4 ++-- 5 files changed, 26 insertions(+), 5 deletions(-) create mode 100644 dojo/db_migrations/0202_alter_dojo_group_social_provider.py diff --git a/dojo/db_migrations/0202_alter_dojo_group_social_provider.py b/dojo/db_migrations/0202_alter_dojo_group_social_provider.py new file mode 100644 index 00000000000..9bbc7e2e5c6 --- /dev/null +++ b/dojo/db_migrations/0202_alter_dojo_group_social_provider.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.13 on 2024-01-25 00:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0201_populate_finding_sla_expiration_date'), + ] + + operations = [ + migrations.AlterField( + model_name='dojo_group', + name='social_provider', + field=models.CharField(blank=True, choices=[('AzureAD', 'AzureAD'), ('Remote', 'Remote')], help_text='Group imported from a social provider.', max_length=10, null=True, verbose_name='Social Authentication Provider'), + ), + ] diff --git a/dojo/models.py b/dojo/models.py index 45d522963ee..77dead1482e 100755 --- a/dojo/models.py +++ b/dojo/models.py @@ -246,14 +246,16 @@ class UserContactInfo(models.Model): class Dojo_Group(models.Model): AZURE = 'AzureAD' + REMOTE = 'Remote' SOCIAL_CHOICES = ( (AZURE, _('AzureAD')), + (REMOTE, _('Remote')), ) name = models.CharField(max_length=255, unique=True) description = models.CharField(max_length=4000, null=True, blank=True) users = models.ManyToManyField(Dojo_User, through='Dojo_Group_Member', related_name='users', blank=True) auth_group = models.ForeignKey(Group, null=True, blank=True, on_delete=models.CASCADE) - social_provider = models.CharField(max_length=10, choices=SOCIAL_CHOICES, blank=True, null=True, help_text='Group imported from a social provider.', verbose_name='Social Authentication Provider') + social_provider = models.CharField(max_length=10, choices=SOCIAL_CHOICES, blank=True, null=True, help_text=_('Group imported from a social provider.'), verbose_name=_('Social Authentication Provider')) def __str__(self): return self.name diff --git a/dojo/pipeline.py b/dojo/pipeline.py index 0ce76220e98..130a795e092 100644 --- a/dojo/pipeline.py +++ b/dojo/pipeline.py @@ -98,7 +98,7 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs): except 
Exception as e: logger.error(f"Could not call microsoft graph API or save groups to member: {e}") if len(group_names) > 0: - assign_user_to_groups(user, group_names, 'AzureAD') + assign_user_to_groups(user, group_names, Dojo_Group.AZURE) if settings.AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS: cleanup_old_groups_for_user(user, group_names) diff --git a/dojo/remote_user.py b/dojo/remote_user.py index 875291c7ba2..7ed5f0a6a4a 100644 --- a/dojo/remote_user.py +++ b/dojo/remote_user.py @@ -6,6 +6,7 @@ from netaddr import IPAddress from django.conf import settings from dojo.pipeline import assign_user_to_groups, cleanup_old_groups_for_user +from dojo.models import Dojo_Group logger = logging.getLogger(__name__) @@ -77,7 +78,7 @@ def configure_user(self, request, user, created=True): if settings.AUTH_REMOTEUSER_GROUPS_HEADER and \ settings.AUTH_REMOTEUSER_GROUPS_HEADER in request.META: - assign_user_to_groups(user, request.META[settings.AUTH_REMOTEUSER_GROUPS_HEADER].split(','), 'Remote') + assign_user_to_groups(user, request.META[settings.AUTH_REMOTEUSER_GROUPS_HEADER].split(','), Dojo_Group.REMOTE) if settings.AUTH_REMOTEUSER_GROUPS_CLEANUP and \ settings.AUTH_REMOTEUSER_GROUPS_HEADER and \ diff --git a/unittests/test_remote_user.py b/unittests/test_remote_user.py index 384e4dda75b..d764358e11e 100644 --- a/unittests/test_remote_user.py +++ b/unittests/test_remote_user.py @@ -16,8 +16,8 @@ def setUp(self): last_name='original_last', email='original@mail.com', ) - self.group1, _ = Dojo_Group.objects.get_or_create(name="group1", social_provider="Remote") - self.group2, _ = Dojo_Group.objects.get_or_create(name="group2", social_provider="Remote") + self.group1, _ = Dojo_Group.objects.get_or_create(name="group1", social_provider=Dojo_Group.REMOTE) + self.group2, _ = Dojo_Group.objects.get_or_create(name="group2", social_provider=Dojo_Group.REMOTE) @override_settings(AUTH_REMOTEUSER_ENABLED=False) def test_disabled(self): From 98ecd13248737b9d49a9df9e3e7253d549e0e497 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 14:01:41 -0600 Subject: [PATCH 15/34] Update rabbitmq:3.12.12-alpine Docker digest from 3.12.12 to 3.12.12-alpine (docker-compose.yml) (#9535) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 4394261baf4..36e83aeb5bd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -149,7 +149,7 @@ services: volumes: - defectdojo_postgres:/var/lib/postgresql/data rabbitmq: - image: rabbitmq:3.12.12-alpine@sha256:fcd6a66524be55c15c81011dc87cc4b6e4405130fbb950c21ad1d31e8f6322dd + image: rabbitmq:3.12.12-alpine@sha256:09216fbcc8cb9588556bcecaa274b4de94d5ec3c2e3ab9c6efdc64677750c648 profiles: - mysql-rabbitmq - postgres-rabbitmq From a5aac391178547283c883f7353fef12f5669bdc6 Mon Sep 17 00:00:00 2001 From: Felix Hernandez Date: Tue, 13 Feb 2024 14:02:19 -0600 Subject: [PATCH 16/34] remove flot-axis library (#9540) --- components/package.json | 1 - dojo/templates/dojo/dashboard-metrics.html | 1 - dojo/templates/dojo/endpoint_pdf_report.html | 1 - dojo/templates/dojo/engagement_pdf_report.html | 1 - dojo/templates/dojo/finding_pdf_report.html | 1 - dojo/templates/dojo/product_endpoint_pdf_report.html | 1 - dojo/templates/dojo/product_metrics.html | 1 - dojo/templates/dojo/product_pdf_report.html | 1 - dojo/templates/dojo/product_type_pdf_report.html | 1 - 
dojo/templates/dojo/test_pdf_report.html | 1 - dojo/templates/dojo/view_endpoint.html | 1 - 11 files changed, 11 deletions(-) diff --git a/components/package.json b/components/package.json index 9a57f7b78dd..b4e7dc9db26 100644 --- a/components/package.json +++ b/components/package.json @@ -21,7 +21,6 @@ "drmonty-datatables-responsive": "^1.0.0", "easymde": "^2.18.0", "flot": "flot/flot#~0.8.3", - "flot-axis": "markrcote/flot-axislabels#*", "font-awesome": "^4.0.0", "fullcalendar": "^3.10.2", "google-code-prettify": "^1.0.0", diff --git a/dojo/templates/dojo/dashboard-metrics.html b/dojo/templates/dojo/dashboard-metrics.html index 2f62a8926aa..929bea53e92 100644 --- a/dojo/templates/dojo/dashboard-metrics.html +++ b/dojo/templates/dojo/dashboard-metrics.html @@ -176,7 +176,6 @@

    {% blocktrans with start_date=start_date.date end_date=end_date.date%}{{ nam - {% if punchcard %} diff --git a/dojo/templates/dojo/endpoint_pdf_report.html b/dojo/templates/dojo/endpoint_pdf_report.html index d08e090173d..b53c8337422 100644 --- a/dojo/templates/dojo/endpoint_pdf_report.html +++ b/dojo/templates/dojo/endpoint_pdf_report.html @@ -279,7 +279,6 @@

    Notes
    - - - - {% if punchcard %} diff --git a/dojo/templates/dojo/product_metrics.html b/dojo/templates/dojo/product_metrics.html index 656bc4a8db1..d5c0b659757 100644 --- a/dojo/templates/dojo/product_metrics.html +++ b/dojo/templates/dojo/product_metrics.html @@ -489,7 +489,6 @@

    - {% include "dojo/filter_js_snippet.html" %} {% if punchcard %} diff --git a/dojo/templates/dojo/product_pdf_report.html b/dojo/templates/dojo/product_pdf_report.html index eb80b8148a2..aa413c72d1d 100644 --- a/dojo/templates/dojo/product_pdf_report.html +++ b/dojo/templates/dojo/product_pdf_report.html @@ -383,7 +383,6 @@

    Notes
    - {% if punchcard %} diff --git a/dojo/templates/dojo/product_type_pdf_report.html b/dojo/templates/dojo/product_type_pdf_report.html index 9bd22d587bc..f8c4175e5de 100644 --- a/dojo/templates/dojo/product_type_pdf_report.html +++ b/dojo/templates/dojo/product_type_pdf_report.html @@ -314,7 +314,6 @@
    Notes
    - - - {% block metrics %} {% endblock metrics %} From 329f9422c7ec634248ddd395c35e048b0b6de183 Mon Sep 17 00:00:00 2001 From: Sebastian Gumprich Date: Wed, 14 Feb 2024 19:00:07 +0100 Subject: [PATCH 17/34] use full url for helm-repos and alias in renovate.json (#9525) With this change, renovate will create PRs to update the Helm dependencies, just as with docker-compose. Note that only setting the repository to the full URL did not work; I also had to add the registryAlias. --- .github/renovate.json | 5 ++++- helm/defectdojo/Chart.yaml | 10 +++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.github/renovate.json b/.github/renovate.json index f64560dd545..7c9c6623cd6 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -12,5 +12,8 @@ "commitMessageExtra": "from {{currentVersion}} to {{#if isMajor}}v{{{newMajor}}}{{else}}{{#if isSingleVersion}}v{{{toVersion}}}{{else}}{{{newValue}}}{{/if}}{{/if}}", "commitMessageSuffix": "({{packageFile}})", "labels": ["dependencies"] - }] + }], + "registryAliases": { + "bitnami": "https://charts.bitnami.com/bitnami" + } } diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 60c20292d0f..ecdffcc3376 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -11,22 +11,22 @@ maintainers: dependencies: - name: mysql version: ~9.1.7 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" condition: mysql.enabled - name: postgresql version: ~11.6.5 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" condition: postgresql.enabled - name: postgresql-ha version: ~9.1.5 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" alias: postgresqlha condition: postgresqlha.enabled - name: rabbitmq version: ~11.2.0 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" condition: rabbitmq.enabled - name: redis version: ~16.12.0 - repository: "@bitnami" + repository: "https://charts.bitnami.com/bitnami" condition: redis.enabled From b008a8a62abd3eb51334fa88d20f2c14c2231844 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 21:23:33 -0600 Subject: [PATCH 18/34] Update Helm release redis from 16.12.3 to ~16.13.0 (helm/defectdojo/Chart.yaml) (#9550) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index f332717cd06..2bebae633dc 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -13,6 +13,6 @@ dependencies: version: 11.2.2 - name: redis repository: https://charts.bitnami.com/bitnami - version: 16.12.3 -digest: sha256:f53ebb0cea44dfbb72ac96ae98680848acd5e17a0947a728e5646460d0da4ef9 -generated: "2023-03-06T17:08:53.379497544Z" + version: 16.13.2 +digest: sha256:f1dea5877872e8baa25492fb77c4468502bdfeb8f520f00f9598b4d33465ce82 +generated: "2024-02-14T22:26:01.747974179Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index ecdffcc3376..414168cc430 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -27,6 +27,6 @@ dependencies: repository: "https://charts.bitnami.com/bitnami" condition: rabbitmq.enabled - name: redis - version: ~16.12.0 + version: ~16.13.0 repository: "https://charts.bitnami.com/bitnami" condition: redis.enabled From 
91d485df6724a7c5f544cdf7c81e81d2605ab508 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 21:28:45 -0600 Subject: [PATCH 19/34] Update rabbitmq:3.12.12-alpine Docker digest from 3.12.12 to 3.12.12-alpine (docker-compose.yml) (#9541) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 36e83aeb5bd..89b06e264d7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -149,7 +149,7 @@ services: volumes: - defectdojo_postgres:/var/lib/postgresql/data rabbitmq: - image: rabbitmq:3.12.12-alpine@sha256:09216fbcc8cb9588556bcecaa274b4de94d5ec3c2e3ab9c6efdc64677750c648 + image: rabbitmq:3.12.12-alpine@sha256:9144c0eca261e36ffd1a3f9ef21a860242a4a60e0211bbade82c80910958a5e9 profiles: - mysql-rabbitmq - postgres-rabbitmq From 26f959abd11013565c3de42264ee60c7903e5c88 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 21:29:07 -0600 Subject: [PATCH 20/34] Update postgres Docker tag from 16.1 to v16.2 (docker-compose.yml) (#9536) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 89b06e264d7..1b43001c3c1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -138,7 +138,7 @@ services: volumes: - defectdojo_data:/var/lib/mysql postgres: - image: postgres:16.1-alpine@sha256:17eb369d9330fe7fbdb2f705418c18823d66322584c77c2b43cc0e1851d01de7 + image: postgres:16.2-alpine@sha256:bbd7346fab25b7e0b25f214829d6ebfb78ef0465059492e46dee740ce8fcd844 profiles: - postgres-rabbitmq - postgres-redis From 16fc7a7c5ff1bce2762a4e231bca74627120fae3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 22:00:10 -0600 Subject: [PATCH 21/34] Update Helm release mysql from 9.1.8 to ~9.19.0 (helm/defectdojo/Chart.yaml) (#9545) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index 2bebae633dc..7152b06eea7 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -1,7 +1,7 @@ dependencies: - name: mysql repository: https://charts.bitnami.com/bitnami - version: 9.1.8 + version: 9.19.1 - name: postgresql repository: https://charts.bitnami.com/bitnami version: 11.6.26 @@ -14,5 +14,5 @@ dependencies: - name: redis repository: https://charts.bitnami.com/bitnami version: 16.13.2 -digest: sha256:f1dea5877872e8baa25492fb77c4468502bdfeb8f520f00f9598b4d33465ce82 -generated: "2024-02-14T22:26:01.747974179Z" +digest: sha256:055c755109a79afc56850a8c742db9968c1ab1b64ea5b1c6c79dd26192ce14d3 +generated: "2024-02-15T03:24:53.319013122Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 414168cc430..9e1c002fe58 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -10,7 +10,7 @@ maintainers: url: https://github.com/DefectDojo/django-DefectDojo dependencies: - name: mysql - version: ~9.1.7 + version: ~9.19.0 repository: "https://charts.bitnami.com/bitnami" condition: mysql.enabled - name: postgresql From 
a2d2fc80839dd6eb4db51a092c6002476ebe1a82 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 09:52:02 -0600 Subject: [PATCH 22/34] Update Helm release rabbitmq from 11.2.2 to ~11.16.0 (helm/defectdojo/Chart.yaml) (#9548) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index 7152b06eea7..d3996a34817 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -10,9 +10,9 @@ dependencies: version: 9.1.9 - name: rabbitmq repository: https://charts.bitnami.com/bitnami - version: 11.2.2 + version: 11.16.2 - name: redis repository: https://charts.bitnami.com/bitnami version: 16.13.2 -digest: sha256:055c755109a79afc56850a8c742db9968c1ab1b64ea5b1c6c79dd26192ce14d3 -generated: "2024-02-15T03:24:53.319013122Z" +digest: sha256:14ecb61931de83a912605f1cca4241184b9ffcddda9450f27883d2c2eab2930e +generated: "2024-02-15T04:14:46.515518985Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 9e1c002fe58..ff2a8a14c79 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -23,7 +23,7 @@ dependencies: alias: postgresqlha condition: postgresqlha.enabled - name: rabbitmq - version: ~11.2.0 + version: ~11.16.0 repository: "https://charts.bitnami.com/bitnami" condition: rabbitmq.enabled - name: redis From 9171885921deb83478611b0ac8553337ce8a13fc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 14:23:51 -0600 Subject: [PATCH 23/34] Update Helm release postgresql from 11.6.26 to ~11.9.0 (helm/defectdojo/Chart.yaml) (#9546) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index d3996a34817..a1d949111dc 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -4,7 +4,7 @@ dependencies: version: 9.19.1 - name: postgresql repository: https://charts.bitnami.com/bitnami - version: 11.6.26 + version: 11.9.13 - name: postgresql-ha repository: https://charts.bitnami.com/bitnami version: 9.1.9 @@ -14,5 +14,5 @@ dependencies: - name: redis repository: https://charts.bitnami.com/bitnami version: 16.13.2 -digest: sha256:14ecb61931de83a912605f1cca4241184b9ffcddda9450f27883d2c2eab2930e -generated: "2024-02-15T04:14:46.515518985Z" +digest: sha256:117e74aeca1950886c3ef4fc4eca1166b67f70cb0ba86d7cca8087d85c18297a +generated: "2024-02-15T16:02:04.692755051Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index ff2a8a14c79..5dcf9bea6e2 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -14,7 +14,7 @@ dependencies: repository: "https://charts.bitnami.com/bitnami" condition: mysql.enabled - name: postgresql - version: ~11.6.5 + version: ~11.9.0 repository: "https://charts.bitnami.com/bitnami" condition: postgresql.enabled - name: postgresql-ha From aba513fedf4ff704c33d7fb3feb35301648e84e7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 14:58:13 -0600 Subject: [PATCH 24/34] Update Helm release postgresql-ha from 9.1.9 to ~9.4.0 (helm/defectdojo/Chart.yaml) (#9547) 
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index a1d949111dc..784d84b4843 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -7,12 +7,12 @@ dependencies: version: 11.9.13 - name: postgresql-ha repository: https://charts.bitnami.com/bitnami - version: 9.1.9 + version: 9.4.11 - name: rabbitmq repository: https://charts.bitnami.com/bitnami version: 11.16.2 - name: redis repository: https://charts.bitnami.com/bitnami version: 16.13.2 -digest: sha256:117e74aeca1950886c3ef4fc4eca1166b67f70cb0ba86d7cca8087d85c18297a -generated: "2024-02-15T16:02:04.692755051Z" +digest: sha256:50d07c49c1fb199a70fafd032712a1d5509a0352f090bfddd2e8a22b35be0961 +generated: "2024-02-15T20:24:24.560785941Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 5dcf9bea6e2..1c44736dafb 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -18,7 +18,7 @@ dependencies: repository: "https://charts.bitnami.com/bitnami" condition: postgresql.enabled - name: postgresql-ha - version: ~9.1.5 + version: ~9.4.0 repository: "https://charts.bitnami.com/bitnami" alias: postgresqlha condition: postgresqlha.enabled From 77f0cf1a14fea3f293c7bda70120eaf33c3c9365 Mon Sep 17 00:00:00 2001 From: kiblik Date: Thu, 15 Feb 2024 22:39:03 +0000 Subject: [PATCH 25/34] Remove DD_USE_L10N (#9491) --- dojo/settings/settings.dist.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index fad2454b7ca..5b772a6daef 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -55,7 +55,6 @@ DD_LANGUAGE_CODE=(str, 'en-us'), DD_SITE_ID=(int, 1), DD_USE_I18N=(bool, True), - DD_USE_L10N=(bool, True), DD_USE_TZ=(bool, True), DD_MEDIA_URL=(str, '/media/'), DD_MEDIA_ROOT=(str, root('media')), @@ -345,10 +344,6 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param # to load the internationalization machinery. USE_I18N = env('DD_USE_I18N') -# If you set this to False, Django will not format dates, numbers and -# calendars according to the current locale. -USE_L10N = env('DD_USE_L10N') - # If you set this to False, Django will not use timezone-aware datetimes. 
USE_TZ = env('DD_USE_TZ') @@ -1665,8 +1660,6 @@ def saml2_attrib_map_format(dict): EDITABLE_MITIGATED_DATA = env('DD_EDITABLE_MITIGATED_DATA') -USE_L10N = True - # FEATURE_FINDING_GROUPS feature is moved to system_settings, will be removed from settings file FEATURE_FINDING_GROUPS = env('DD_FEATURE_FINDING_GROUPS') JIRA_TEMPLATE_ROOT = env('DD_JIRA_TEMPLATE_ROOT') From 648554b67dc5b75aee04b74df6718caf47a15d3a Mon Sep 17 00:00:00 2001 From: kiblik Date: Thu, 15 Feb 2024 22:40:42 +0000 Subject: [PATCH 26/34] API: removal of drf_yasg (OpenAPI 2.0 Swagger) (#9108) * Removal of drf_yasg * Clean filterwarnings --- NOTICE | 43 - .../en/getting_started/upgrading/2.32.md | 9 +- docs/content/en/integrations/api-v2-docs.md | 7 +- dojo/api_v2/mixins.py | 5 - dojo/api_v2/prefetch/__init__.py | 3 +- dojo/api_v2/prefetch/schema.py | 168 +--- dojo/api_v2/schema/__init__.py | 17 - dojo/api_v2/schema/extra_schema.py | 140 --- dojo/api_v2/schema/utils.py | 63 -- dojo/api_v2/serializers.py | 23 +- dojo/api_v2/views.py | 443 +--------- dojo/risk_acceptance/api.py | 9 - dojo/settings/settings.dist.py | 30 - dojo/urls.py | 20 - requirements.txt | 1 - unittests/test_apiv2_metadata.py | 2 +- unittests/test_swagger_schema.py | 835 ------------------ 17 files changed, 70 insertions(+), 1748 deletions(-) delete mode 100644 dojo/api_v2/schema/__init__.py delete mode 100644 dojo/api_v2/schema/extra_schema.py delete mode 100644 dojo/api_v2/schema/utils.py delete mode 100644 unittests/test_swagger_schema.py diff --git a/NOTICE b/NOTICE index e939bd7fc99..7733257f54b 100644 --- a/NOTICE +++ b/NOTICE @@ -3910,49 +3910,6 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -drf-yasg -1.20.0 -BSD License -.. |br| raw:: html - -
    - -####### -License -####### - -******************** -BSD 3-Clause License -******************** - -Copyright (c) 2017 - 2019, Cristian V. |br|\ All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - ecdsa 0.17.0 MIT diff --git a/docs/content/en/getting_started/upgrading/2.32.md b/docs/content/en/getting_started/upgrading/2.32.md index 0d04c771e36..59081b30857 100644 --- a/docs/content/en/getting_started/upgrading/2.32.md +++ b/docs/content/en/getting_started/upgrading/2.32.md @@ -2,6 +2,13 @@ title: 'Upgrading to DefectDojo Version 2.32.x' toc_hide: true weight: -20240205 -description: No special instructions. +description: Breaking change: Removal of OpenAPI 2.0 Swagger --- There are no special instructions for upgrading to 2.32.x. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.32.0) for the contents of the release. + +**Removal** + +The OpenAPI 2.0 Swagger API documentation was removed in favor of the existing +OpenAPI 3.0 API documentation page. + +*Note*: The API has not changed in any way and behaves the same between OAPI2 and OAPI3 \ No newline at end of file diff --git a/docs/content/en/integrations/api-v2-docs.md b/docs/content/en/integrations/api-v2-docs.md index c64dfcc8919..7b8d1f7956c 100644 --- a/docs/content/en/integrations/api-v2-docs.md +++ b/docs/content/en/integrations/api-v2-docs.md @@ -16,11 +16,8 @@ Docs link on the user drop down menu in the header. ![image](../../images/api_v2_1.png) -The documentation is generated using [Django Rest Framework -Yet Another Swagger Generator](https://github.com/axnsan12/drf-yasg/), and is -interactive. On the top of API v2 docs is a link that generates an OpenAPI v2 spec. - -As a preparation to move to OpenAPIv3, we have added an compatible spec and documentation at [`/api/v2/oa3/swagger-ui/`](https://demo.defectdojo.org/api/v2/oa3/swagger-ui/) +The documentation is generated using [drf-spectacular](https://drf-spectacular.readthedocs.io/) at [`/api/v2/oa3/swagger-ui/`](https://demo.defectdojo.org/api/v2/oa3/swagger-ui/), and is +interactive. 
On the top of API v2 docs is a link that generates an OpenAPI v3 spec. To interact with the documentation, a valid Authorization header value is needed. Visit the `/api/key-v2` view to generate your diff --git a/dojo/api_v2/mixins.py b/dojo/api_v2/mixins.py index e0770971f3f..54d55a76d09 100644 --- a/dojo/api_v2/mixins.py +++ b/dojo/api_v2/mixins.py @@ -1,7 +1,6 @@ from django.db import DEFAULT_DB_ALIAS from django.contrib.admin.utils import NestedObjects from drf_spectacular.utils import extend_schema -from drf_yasg.utils import swagger_auto_schema from rest_framework.decorators import action from rest_framework import status from rest_framework.authtoken.models import Token @@ -17,10 +16,6 @@ class DeletePreviewModelMixin: status.HTTP_200_OK: serializers.DeletePreviewSerializer(many=True) }, ) - @swagger_auto_schema( - method="get", - responses={"default": serializers.DeletePreviewSerializer(many=True)}, - ) @action(detail=True, methods=["get"], filter_backends=[], suffix="List") def delete_preview(self, request, pk=None): object = self.get_object() diff --git a/dojo/api_v2/prefetch/__init__.py b/dojo/api_v2/prefetch/__init__.py index f0449c7b303..3d02655ec22 100644 --- a/dojo/api_v2/prefetch/__init__.py +++ b/dojo/api_v2/prefetch/__init__.py @@ -1,4 +1,3 @@ from .mixins import PrefetchListMixin, PrefetchRetrieveMixin -from .schema import get_prefetch_schema -__all__ = ["PrefetchListMixin", "PrefetchRetrieveMixin", "get_prefetch_schema"] +__all__ = ["PrefetchListMixin", "PrefetchRetrieveMixin"] diff --git a/dojo/api_v2/prefetch/schema.py b/dojo/api_v2/prefetch/schema.py index 6d04e751800..6fc08681477 100644 --- a/dojo/api_v2/prefetch/schema.py +++ b/dojo/api_v2/prefetch/schema.py @@ -1,84 +1,5 @@ -from drf_yasg import openapi, utils from .prefetcher import _Prefetcher from .utils import _get_prefetchable_fields -from ..schema import extra_schema -from ..schema.utils import LazySchemaRef - - -def get_prefetch_schema(methods, serializer): - """Swagger / OpenAPI v2 (drf-yasg) Return a composable swagger schema that contains in the query the fields that can be prefetch from the model - supported by the serializer and in the reponse the structure of these fields in a new top-level attribute - named prefetch. 
- - Returns: - ComposableSchema: A swagger schema - """ - prefetcher = _Prefetcher() - fields = _get_prefetchable_fields(serializer()) - - field_to_serializer = dict( - [ - (name, prefetcher._find_serializer(field_type)) - for name, field_type in fields - if prefetcher._find_serializer(field_type) - ] - ) - fields_to_refname = dict( - [ - (name, utils.get_serializer_ref_name(serializer())) - for name, serializer in field_to_serializer.items() - ] - ) - fields_name = [ - name - for name, field_type in fields - if prefetcher._find_serializer(field_type) - ] - - # New openapi parameter corresponding to the prefetchable fields - prefetch_params = [ - openapi.Parameter( - "prefetch", - in_=openapi.IN_QUERY, - required=False, - type=openapi.TYPE_ARRAY, - items=openapi.Items(type=openapi.TYPE_STRING, enum=fields_name), - ) - ] - - additional_props = dict( - [ - ( - name, - openapi.Schema( - type=openapi.TYPE_OBJECT, - read_only=True, - additional_properties=LazySchemaRef( - fields_to_refname[name], True - ), - ), - ) - for name in fields_name - ] - ) - prefetch_response = { - "200": { - "prefetch": openapi.Schema( - type=openapi.TYPE_OBJECT, properties=additional_props - ) - } - } - - schema = extra_schema.IdentitySchema() - for method in methods: - schema = schema.composeWith( - extra_schema.ExtraParameters(method, prefetch_params) - ) - schema = schema.composeWith( - extra_schema.ExtraResponseField(method, prefetch_response) - ) - - return schema def _get_path_to_GET_serializer_map(generator): @@ -97,6 +18,25 @@ def _get_path_to_GET_serializer_map(generator): return path_to_GET_serializer +def get_serializer_ref_name(serializer): + """Get serializer's ref_name + inspired by https://github.com/axnsan12/drf-yasg/blob/78031f0c189585c30fccb5005a6899f2d34289a9/src/drf_yasg/utils.py#L416 + + :param serializer: Serializer instance + :return: Serializer's ``ref_name`` or ``None`` for inline serializer + :rtype: str or None + """ + serializer_meta = getattr(serializer, 'Meta', None) + serializer_name = type(serializer).__name__ + if hasattr(serializer_meta, 'ref_name'): + ref_name = serializer_meta.ref_name + else: + ref_name = serializer_name + if ref_name.endswith('Serializer'): + ref_name = ref_name[:-len('Serializer')] + return ref_name + + def prefetch_postprocessing_hook(result, generator, request, public): """OpenAPI v3 (drf-spectacular) Some endpoints are using the PrefetchListMixin and PrefetchRetrieveMixin. These have nothing to do with Django prefetch_related. 
@@ -131,55 +71,37 @@ def prefetch_postprocessing_hook(result, generator, request, public): "enum": field_names, } - field_to_serializer = dict( - [ - (name, prefetcher._find_serializer(field_type)) - for name, field_type in fields - if prefetcher._find_serializer(field_type) - ] - ) - fields_to_refname = dict( - [ - (name, utils.get_serializer_ref_name(serializer())) - for name, serializer in field_to_serializer.items() - ] - ) - properties = dict( - [ - ( - name, - dict( - [ - ("type", "object"), - ("readOnly", True), - ( - "additionalProperties", - dict( - [ - ( - "$ref", - "#/components/schemas/" - + fields_to_refname[ - name - ], - ) - ] - ), - ), - ] - ), - ) - for name in field_names - ] - ) + field_to_serializer = { + name: prefetcher._find_serializer(field_type) + for name, field_type in fields + if prefetcher._find_serializer(field_type) + } + + fields_to_refname = { + name: get_serializer_ref_name(serializer()) + for name, serializer in field_to_serializer.items() + } + + properties = { + name: { + "type": "object", + "readOnly": True, + "additionalProperties": { + "$ref": f"#/components/schemas/{fields_to_refname[name]}" + } + } + for name in field_names + } + ref = paths[path]["get"]["responses"]["200"]["content"][ "application/json" ]["schema"]["$ref"] component_name = ref.split("/")[-1] result["components"]["schemas"][component_name][ "properties" - ]["prefetch"] = dict( - [("type", "object"), ("properties", properties)] - ) + ]["prefetch"] = { + "type": "object", + "properties": properties, + } return result diff --git a/dojo/api_v2/schema/__init__.py b/dojo/api_v2/schema/__init__.py deleted file mode 100644 index 6a69a167022..00000000000 --- a/dojo/api_v2/schema/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from .extra_schema import ( - IdentitySchema, - ExtraParameters, - ExtraResponseField, - ComposableSchema, -) -from .utils import LazySchemaRef, try_apply, resolve_lazy_ref - -__all__ = [ - "IdentitySchema", - "ExtraParameters", - "ExtraResponseField", - "ComposableSchema", - "LazySchemaRef", - "try_apply", - "resolve_lazy_ref", -] diff --git a/dojo/api_v2/schema/extra_schema.py b/dojo/api_v2/schema/extra_schema.py deleted file mode 100644 index 86fd565e370..00000000000 --- a/dojo/api_v2/schema/extra_schema.py +++ /dev/null @@ -1,140 +0,0 @@ -from drf_yasg.inspectors.view import SwaggerAutoSchema -from drf_yasg.openapi import resolve_ref, Schema -from .utils import resolve_lazy_ref -import copy - - -class ComposableSchema: - """A composable schema defines a transformation on drf_yasg Operation. These - schema can then be composed with another composable schema using the composeWith method - yielding a new composable schema whose transformation is defined as the function composition - of the transformation of the two source schema. - """ - - def transform_operation(self, operation, resolver): - """Defines an operation transformation - - Args: - operation (Operation): the operation to transform - resolver (Resolver): the schema refs resolver - """ - - def composeWith(self, schema): - """Allow two schema to be composed into a new schema. 
- Given the caller schema 'self' and another schema 'schema', - this operation yields a new composable schema whose transform_operation - if defined as - transform_operation(op, res) = schema.transform_operation(self.transform_operation(op, res), res) - - Args: - schema (ComposableSchema): The schema to compose with - - Returns: - ComposableSchema: the newly composed schema - """ - op = self.transform_operation - - class _Wrapper(ComposableSchema): - def transform_operation(self, operation, resolver): - return schema.transform_operation( - op(operation, resolver), resolver - ) - - return _Wrapper() - - def to_schema(self): - """Convert the composable schema into a SwaggerAutoSchema that - can be used with the drf_yasg library code - - Returns: - SwaggerAutoSchema: the swagger auto schema derived from the composable schema - """ - op = self.transform_operation - - class _Schema(SwaggerAutoSchema): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def get_operation(self, operation_keys): - operation = super().get_operation(operation_keys) - return op(operation, self.components) - - return _Schema - - -class IdentitySchema(ComposableSchema): - def transform_operation(self, operation, resolver): - return operation - - -class ExtraParameters(ComposableSchema): - """Define a schema that can add parameters to the operation""" - - def __init__(self, operation_name, extra_parameters, *args, **kwargs): - """Initialize the schema - - Args: - operation_name (string): the name of the operation to transform - extra_parameters (list[Parameter]): list of openapi parameters to add - """ - super().__init__(*args, **kwargs) - self._extra_parameters = extra_parameters - self._operation_name = operation_name - - def transform_operation(self, operation, resolver): - operation_id = operation["operationId"] - if not operation_id.endswith(self._operation_name): - return operation - - for param in self._extra_parameters: - operation["parameters"].append(resolve_lazy_ref(param, resolver)) - return operation - - -class ExtraResponseField(ComposableSchema): - """Define a schema that can add fields to the responses of the operation""" - - def __init__(self, operation_name, extra_fields, *args, **kwargs): - """Initialize the schema - - Args: - operation_name (string): the name of the operation to transform - extra_fields (dict()): description of the fields to add to the responses. The format is - { - parameters: list[openapi.Parameter](params1, params2, ...), - responses: { - code1: { - field1: openapi.Schema, - field2: openapi.Schema, - ... - }, - code2: ... 
- } - } - """ - super().__init__(*args, **kwargs) - self._extra_fields = extra_fields - self._operation_name = operation_name - - def transform_operation(self, operation, resolver): - operation_id = operation["operationId"] - if not operation_id.endswith(self._operation_name): - return operation - - responses = operation["responses"] - for code, params in self._extra_fields.items(): - if code in responses: - original_schema = responses[code]["schema"] - schema = ( - original_schema - if isinstance(original_schema, Schema) - else resolve_ref(original_schema, resolver) - ) - schema = copy.deepcopy(schema) - - for name, param in params.items(): - schema["properties"][name] = resolve_lazy_ref( - param, resolver - ) - responses[code]["schema"] = schema - return operation diff --git a/dojo/api_v2/schema/utils.py b/dojo/api_v2/schema/utils.py deleted file mode 100644 index 1276202fc81..00000000000 --- a/dojo/api_v2/schema/utils.py +++ /dev/null @@ -1,63 +0,0 @@ -from drf_yasg.openapi import SchemaRef, Schema - - -class LazySchemaRef: - """Utility class to support SchemaRef definition without knowing the resolver. - The reference can be evaluated later in the context of a swagger generator - """ - - def __init__(self, schema_name, ignore_unresolved=False): - # Bind curried version of the SchemaRef init - self.schema_ref = lambda resolver: SchemaRef( - resolver, schema_name, ignore_unresolved - ) - - def apply(self, resolver): - """Resolve the LazySchemaRef with the given resolver - - Args: - resolver (ReferenceResolver): resolver containing the schema refs - - Returns: - SchemaRef: the corresponding SchemaRef - """ - return self.schema_ref(resolver) - - -def try_apply(obj, resolver): - """Try to resolve a LazySchemaRef - - Args: - obj (object): the object to resolve - resolver (resolver): the resolver to use - - Returns: - object: the original object if it was not resolve otherwise the resolved LazySchemaRef - """ - if isinstance(obj, LazySchemaRef): - return obj.apply(resolver) - else: - return obj - - -def resolve_lazy_ref(schema, resolver): - """Recursively evaluate the schema to unbox LazySchemaRef based on the underlying resolvers. 
- - Args: - schema (object): the schema to evaluate - - Returns: - object: the schema without LazySchemaRef - """ - if not isinstance(schema, Schema): - return try_apply(schema, resolver) - - if "properties" in schema: - for prop_name, prop in schema["properties"].items(): - schema["properties"][prop_name] = resolve_lazy_ref(prop, resolver) - if "additionalProperties" in schema: - schema["additionalProperties"] = resolve_lazy_ref( - schema["additionalProperties"], resolver - ) - - return schema diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index 5778f2147ca..cf6aec4a070 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -2,7 +2,6 @@ from django.contrib.auth.models import Group from typing import List from drf_spectacular.utils import extend_schema_field -from drf_yasg.utils import swagger_serializer_method from rest_framework.exceptions import NotFound from rest_framework.fields import DictField, MultipleChoiceField from datetime import datetime @@ -1500,17 +1499,14 @@ class RiskAcceptanceSerializer(serializers.ModelSerializer): path = serializers.SerializerMethodField() @extend_schema_field(serializers.CharField()) - @swagger_serializer_method(serializers.CharField()) def get_recommendation(self, obj): return Risk_Acceptance.TREATMENT_TRANSLATIONS.get(obj.recommendation) @extend_schema_field(serializers.CharField()) - @swagger_serializer_method(serializers.CharField()) def get_decision(self, obj): return Risk_Acceptance.TREATMENT_TRANSLATIONS.get(obj.decision) @extend_schema_field(serializers.CharField()) - @swagger_serializer_method(serializers.CharField()) def get_path(self, obj): engagement = Engagement.objects.filter( risk_acceptance__id__in=[obj.id] @@ -1526,7 +1522,6 @@ def get_path(self, obj): return path @extend_schema_field(serializers.IntegerField()) - @swagger_serializer_method(serializers.IntegerField()) def get_engagement(self, obj): engagement = Engagement.objects.filter( risk_acceptance__id__in=[obj.id] @@ -1629,14 +1624,12 @@ class FindingRelatedFieldsSerializer(serializers.Serializer): jira = serializers.SerializerMethodField() @extend_schema_field(FindingTestSerializer) - @swagger_serializer_method(FindingTestSerializer) def get_test(self, obj): return FindingTestSerializer(read_only=True).to_representation( obj.test ) @extend_schema_field(JIRAIssueSerializer) - @swagger_serializer_method(JIRAIssueSerializer) def get_jira(self, obj): issue = jira_helper.get_jira_issue(obj) if issue is None: @@ -1683,17 +1676,14 @@ class Meta: ) @extend_schema_field(serializers.DateTimeField()) - @swagger_serializer_method(serializers.DateTimeField()) def get_jira_creation(self, obj): return jira_helper.get_jira_creation(obj) @extend_schema_field(serializers.DateTimeField()) - @swagger_serializer_method(serializers.DateTimeField()) def get_jira_change(self, obj): return jira_helper.get_jira_change(obj) @extend_schema_field(FindingRelatedFieldsSerializer) - @swagger_serializer_method(FindingRelatedFieldsSerializer) def get_related_fields(self, obj): request = self.context.get("request", None) if request is None: @@ -1798,9 +1788,6 @@ def build_relational_field(self, field_name, relation_info): return super().build_relational_field(field_name, relation_info) @extend_schema_field(BurpRawRequestResponseSerializer) - @swagger_serializer_method( - serializer_or_field=BurpRawRequestResponseSerializer - ) def get_request_response(self, obj): # burp_req_resp = BurpRawRequestResponse.objects.filter(finding=obj) burp_req_resp = 
obj.burprawrequestresponse_set.all() @@ -2039,12 +2026,7 @@ def validate(self, data): def get_findings_count(self, obj) -> int: return obj.findings_count - # -> List[int] as return type doesn't seem enough for drf-yasg - @swagger_serializer_method( - serializer_or_field=serializers.ListField( - child=serializers.IntegerField() - ) - ) + # TODO, maybe extend_schema_field is needed here? def get_findings_list(self, obj) -> List[int]: return obj.open_findings_list @@ -3185,9 +3167,6 @@ class QuestionnaireEngagementSurveySerializer(serializers.ModelSerializer): questions = serializers.SerializerMethodField() @extend_schema_field(serializers.ListField(child=serializers.CharField())) - @swagger_serializer_method( - serializers.ListField(child=serializers.CharField()) - ) def get_questions(self, obj): questions = obj.questions.all() formated_questions = [] diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index fceb87c7ea2..793ff5d240b 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -6,9 +6,6 @@ from django.utils import timezone from django.contrib.auth.models import Permission from django.core.exceptions import ValidationError -from django.utils.decorators import method_decorator -from drf_yasg.inspectors.base import NotHandled -from drf_yasg.inspectors.query import CoreAPICompatInspector from rest_framework import viewsets, mixins, status from rest_framework.response import Response from django.db import IntegrityError @@ -16,8 +13,6 @@ from rest_framework.decorators import action from rest_framework.parsers import MultiPartParser from django_filters.rest_framework import DjangoFilterBackend -from drf_yasg import openapi -from drf_yasg.utils import swagger_auto_schema, no_body import base64 import mimetypes from dojo.engagement.services import close_engagement, reopen_engagement @@ -119,7 +114,6 @@ serializers, permissions, prefetch, - schema, mixins as dojo_mixins, ) import dojo.jira_link.helper as jira_helper @@ -242,10 +236,6 @@ class DojoGroupViewSet( queryset = Dojo_Group.objects.none() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "name", "social_provider"] - swagger_schema = prefetch.get_prefetch_schema( - ["dojo_groups_list", "dojo_groups_read"], - serializers.DojoGroupSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasDojoGroupPermission, @@ -287,10 +277,6 @@ class DojoGroupMemberViewSet( queryset = Dojo_Group_Member.objects.none() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "group_id", "user_id"] - swagger_schema = prefetch.get_prefetch_schema( - ["dojo_group_members_list", "dojo_group_members_read"], - serializers.DojoGroupMemberSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasDojoGroupMemberPermission, @@ -302,9 +288,6 @@ def get_queryset(self): @extend_schema( exclude=True ) - @swagger_auto_schema( - auto_schema=None - ) def partial_update(self, request, pk=None): # Object authorization won't work if not all data is provided response = {"message": "Patch function is not offered in this path."} @@ -319,10 +302,6 @@ class GlobalRoleViewSet( queryset = Global_Role.objects.all() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "user", "group", "role"] - swagger_schema = prefetch.get_prefetch_schema( - ["global_roles_list", "global_roles_read"], - serializers.GlobalRoleSerializer, - ).to_schema() permission_classes = (permissions.IsSuperUser, DjangoModelPermissions) @@ -334,9 +313,7 @@ class EndPointViewSet( queryset = 
Endpoint.objects.none() filter_backends = (DjangoFilterBackend,) filterset_class = ApiEndpointFilter - swagger_schema = prefetch.get_prefetch_schema( - ["endpoints_list", "endpoints_read"], serializers.EndpointSerializer - ).to_schema() + permission_classes = ( IsAuthenticated, permissions.UserHasEndpointPermission, @@ -349,10 +326,6 @@ def get_queryset(self): request=serializers.ReportGenerateOptionSerializer, responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, ) - @swagger_auto_schema( - request_body=serializers.ReportGenerateOptionSerializer, - responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, - ) @action( detail=True, methods=["post"], permission_classes=[IsAuthenticated] ) @@ -403,10 +376,7 @@ class EndpointStatusViewSet( "finding", "endpoint", ] - swagger_schema = prefetch.get_prefetch_schema( - ["endpoint_status_list", "endpoint_status_read"], - serializers.EndpointStatusSerializer, - ).to_schema() + permission_classes = ( IsAuthenticated, permissions.UserHasEndpointStatusPermission, @@ -427,19 +397,7 @@ class EngagementViewSet( queryset = Engagement.objects.none() filter_backends = (DjangoFilterBackend,) filterset_class = ApiEngagementFilter - swagger_schema = ( - prefetch.get_prefetch_schema( - ["engagements_list", "engagements_read"], - serializers.EngagementSerializer, - ) - .composeWith( - prefetch.get_prefetch_schema( - ["engagements_complete_checklist_read"], - serializers.EngagementCheckListSerializer, - ) - ) - .to_schema() - ) + permission_classes = ( IsAuthenticated, permissions.UserHasEngagementPermission, @@ -468,9 +426,6 @@ def get_queryset(self): @extend_schema( request=OpenApiTypes.NONE, responses={status.HTTP_200_OK: ""} ) - @swagger_auto_schema( - request_body=no_body, responses={status.HTTP_200_OK: ""} - ) @action(detail=True, methods=["post"]) def close(self, request, pk=None): eng = self.get_object() @@ -480,9 +435,6 @@ def close(self, request, pk=None): @extend_schema( request=OpenApiTypes.NONE, responses={status.HTTP_200_OK: ""} ) - @swagger_auto_schema( - request_body=no_body, responses={status.HTTP_200_OK: ""} - ) @action(detail=True, methods=["post"]) def reopen(self, request, pk=None): eng = self.get_object() @@ -493,10 +445,6 @@ def reopen(self, request, pk=None): request=serializers.ReportGenerateOptionSerializer, responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, ) - @swagger_auto_schema( - request_body=serializers.ReportGenerateOptionSerializer, - responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, - ) @action( detail=True, methods=["post"], permission_classes=[IsAuthenticated] ) @@ -541,17 +489,6 @@ def generate_report(self, request, pk=None): request=serializers.AddNewNoteOptionSerializer, responses={status.HTTP_201_CREATED: serializers.NoteSerializer}, ) - @swagger_auto_schema( - method="get", - responses={ - status.HTTP_200_OK: serializers.EngagementToNotesSerializer - }, - ) - @swagger_auto_schema( - methods=["post"], - request_body=serializers.AddNewNoteOptionSerializer, - responses={status.HTTP_201_CREATED: serializers.NoteSerializer}, - ) @action(detail=True, methods=["get", "post"]) def notes(self, request, pk=None): engagement = self.get_object() @@ -602,17 +539,6 @@ def notes(self, request, pk=None): request=serializers.AddNewFileOptionSerializer, responses={status.HTTP_201_CREATED: serializers.FileSerializer}, ) - @swagger_auto_schema( - method="get", - responses={ - status.HTTP_200_OK: serializers.EngagementToFilesSerializer - }, - ) - @swagger_auto_schema( - 
method="post", - request_body=serializers.AddNewFileOptionSerializer, - responses={status.HTTP_201_CREATED: serializers.FileSerializer}, - ) @action( detail=True, methods=["get", "post"], parser_classes=(MultiPartParser,) ) @@ -650,13 +576,6 @@ def files(self, request, pk=None): status.HTTP_201_CREATED: serializers.EngagementCheckListSerializer }, ) - @swagger_auto_schema( - method="post", - request_body=serializers.EngagementCheckListSerializer, - responses={ - status.HTTP_201_CREATED: serializers.EngagementCheckListSerializer - }, - ) @action(detail=True, methods=["get", "post"]) def complete_checklist(self, request, pk=None): from dojo.api_v2.prefetch.prefetcher import _Prefetcher @@ -702,12 +621,6 @@ def complete_checklist(self, request, pk=None): status.HTTP_200_OK: serializers.RawFileSerializer, }, ) - @swagger_auto_schema( - method="get", - responses={ - status.HTTP_200_OK: serializers.RawFileSerializer, - }, - ) @action( detail=True, methods=["get"], @@ -749,10 +662,7 @@ class RiskAcceptanceViewSet( queryset = Risk_Acceptance.objects.none() filter_backends = (DjangoFilterBackend,) filterset_class = ApiRiskAcceptanceFilter - swagger_schema = prefetch.get_prefetch_schema( - ["risk_acceptance_list", "risk_acceptance_read"], - serializers.RiskAcceptanceSerializer, - ).to_schema() + permission_classes = ( IsAuthenticated, permissions.UserHasRiskAcceptancePermission, @@ -773,12 +683,6 @@ def get_queryset(self): status.HTTP_200_OK: serializers.RiskAcceptanceProofSerializer, }, ) - @swagger_auto_schema( - method="get", - responses={ - status.HTTP_200_OK: serializers.RiskAcceptanceProofSerializer, - }, - ) @action(detail=True, methods=["get"]) def download_proof(self, request, pk=None): risk_acceptance = self.get_object() @@ -815,10 +719,7 @@ class AppAnalysisViewSet( queryset = App_Analysis.objects.none() filter_backends = (DjangoFilterBackend,) filterset_class = ApiAppAnalysisFilter - swagger_schema = prefetch.get_prefetch_schema( - ["technologies_list", "technologies_read"], - serializers.AppAnalysisSerializer, - ).to_schema() + permission_classes = ( IsAuthenticated, permissions.UserHasAppAnalysisPermission, @@ -835,10 +736,7 @@ class CredentialsViewSet( serializer_class = serializers.CredentialSerializer queryset = Cred_User.objects.all() filter_backends = (DjangoFilterBackend,) - swagger_schema = prefetch.get_prefetch_schema( - ["credentials_list", "credentials_read"], - serializers.CredentialSerializer, - ).to_schema() + permission_classes = (permissions.IsSuperUser, DjangoModelPermissions) @@ -850,10 +748,7 @@ class CredentialsMappingViewSet( queryset = Cred_Mapping.objects.none() filter_backends = (DjangoFilterBackend,) filterset_class = ApiCredentialsFilter - swagger_schema = prefetch.get_prefetch_schema( - ["credential_mappings_list", "credential_mappings_read"], - serializers.CredentialMappingSerializer, - ).to_schema() + permission_classes = ( IsAuthenticated, permissions.UserHasCredentialPermission, @@ -934,27 +829,6 @@ class FindingViewSet( permissions.UserHasFindingPermission, ) - _related_field_parameters = [ - openapi.Parameter( - name="related_fields", - in_=openapi.IN_QUERY, - description="Expand finding external relations (engagement, environment, product, product_type, test, test_type)", - type=openapi.TYPE_BOOLEAN, - ) - ] - swagger_schema = ( - prefetch.get_prefetch_schema( - ["findings_list", "findings_read"], serializers.FindingSerializer - ) - .composeWith( - schema.ExtraParameters("findings_list", _related_field_parameters) - ) - .composeWith( - 
schema.ExtraParameters("findings_read", _related_field_parameters) - ) - .to_schema() - ) - # Overriding mixins.UpdateModeMixin perform_update() method to grab push_to_jira # data and add that as a parameter to .save() def perform_update(self, serializer): @@ -1003,11 +877,6 @@ def get_serializer_class(self): request=serializers.FindingCloseSerializer, responses={status.HTTP_200_OK: serializers.FindingCloseSerializer}, ) - @swagger_auto_schema( - method="post", - request_body=serializers.FindingCloseSerializer, - responses={status.HTTP_200_OK: serializers.FindingCloseSerializer}, - ) @action(detail=True, methods=["post"]) def close(self, request, pk=None): finding = self.get_object() @@ -1069,14 +938,6 @@ def close(self, request, pk=None): request=serializers.TagSerializer, responses={status.HTTP_201_CREATED: serializers.TagSerializer}, ) - @swagger_auto_schema( - method="get", responses={status.HTTP_200_OK: serializers.TagSerializer} - ) - @swagger_auto_schema( - method="post", - request_body=serializers.TagSerializer, - responses={status.HTTP_200_OK: serializers.TagSerializer}, - ) @action(detail=True, methods=["get", "post"]) def tags(self, request, pk=None): finding = self.get_object() @@ -1118,19 +979,6 @@ def tags(self, request, pk=None): status.HTTP_201_CREATED: serializers.BurpRawRequestResponseSerializer }, ) - @swagger_auto_schema( - method="get", - responses={ - status.HTTP_200_OK: serializers.BurpRawRequestResponseSerializer - }, - ) - @swagger_auto_schema( - method="post", - request_body=serializers.BurpRawRequestResponseSerializer, - responses={ - status.HTTP_200_OK: serializers.BurpRawRequestResponseSerializer - }, - ) @action(detail=True, methods=["get", "post"]) def request_response(self, request, pk=None): finding = self.get_object() @@ -1177,15 +1025,6 @@ def request_response(self, request, pk=None): request=serializers.AddNewNoteOptionSerializer, responses={status.HTTP_201_CREATED: serializers.NoteSerializer}, ) - @swagger_auto_schema( - method="get", - responses={status.HTTP_200_OK: serializers.FindingToNotesSerializer}, - ) - @swagger_auto_schema( - methods=["post"], - request_body=serializers.AddNewNoteOptionSerializer, - responses={status.HTTP_201_CREATED: serializers.NoteSerializer}, - ) @action(detail=True, methods=["get", "post"]) def notes(self, request, pk=None): finding = self.get_object() @@ -1239,15 +1078,6 @@ def notes(self, request, pk=None): request=serializers.AddNewFileOptionSerializer, responses={status.HTTP_201_CREATED: serializers.FileSerializer}, ) - @swagger_auto_schema( - method="get", - responses={status.HTTP_200_OK: serializers.FindingToFilesSerializer}, - ) - @swagger_auto_schema( - method="post", - request_body=serializers.AddNewFileOptionSerializer, - responses={status.HTTP_201_CREATED: serializers.FileSerializer}, - ) @action( detail=True, methods=["get", "post"], parser_classes=(MultiPartParser,) ) @@ -1284,12 +1114,6 @@ def files(self, request, pk=None): status.HTTP_200_OK: serializers.RawFileSerializer, }, ) - @swagger_auto_schema( - method="get", - responses={ - status.HTTP_200_OK: serializers.RawFileSerializer, - }, - ) @action( detail=True, methods=["get"], @@ -1327,10 +1151,6 @@ def download_file(self, request, file_id, pk=None): request=serializers.FindingNoteSerializer, responses={status.HTTP_204_NO_CONTENT: ""}, ) - @swagger_auto_schema( - request_body=serializers.FindingNoteSerializer, - responses={status.HTTP_204_NO_CONTENT: ""}, - ) @action(detail=True, methods=["patch"]) def remove_note(self, request, pk=None): """Remove Note 
From Finding Note""" @@ -1370,11 +1190,6 @@ def remove_note(self, request, pk=None): request=serializers.TagSerializer, responses={status.HTTP_204_NO_CONTENT: ""}, ) - @swagger_auto_schema( - methods=["put", "patch"], - request_body=serializers.TagSerializer, - responses={status.HTTP_204_NO_CONTENT: ""}, - ) @action(detail=True, methods=["put", "patch"]) def remove_tags(self, request, pk=None): """Remove Tag(s) from finding list of tags""" @@ -1423,11 +1238,6 @@ def remove_tags(self, request, pk=None): status.HTTP_200_OK: serializers.FindingSerializer(many=True) } ) - @swagger_auto_schema( - responses={ - status.HTTP_200_OK: serializers.FindingSerializer(many=True) - } - ) @action( detail=True, methods=["get"], @@ -1447,10 +1257,6 @@ def get_duplicate_cluster(self, request, pk): request=OpenApiTypes.NONE, responses={status.HTTP_204_NO_CONTENT: ""}, ) - @swagger_auto_schema( - request_body=no_body, - responses={status.HTTP_204_NO_CONTENT: ""}, - ) @action(detail=True, methods=["post"], url_path=r"duplicate/reset") def reset_finding_duplicate_status(self, request, pk): checked_duplicate_id = reset_finding_duplicate_status_internal( @@ -1469,9 +1275,6 @@ def reset_finding_duplicate_status(self, request, pk): ], responses={status.HTTP_204_NO_CONTENT: ""}, ) - @swagger_auto_schema( - responses={status.HTTP_204_NO_CONTENT: ""}, request_body=no_body - ) @action( detail=True, methods=["post"], url_path=r"original/(?P\d+)" ) @@ -1485,10 +1288,6 @@ def set_finding_as_original(self, request, pk, new_fid): request=serializers.ReportGenerateOptionSerializer, responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, ) - @swagger_auto_schema( - request_body=serializers.ReportGenerateOptionSerializer, - responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, - ) @action( detail=False, methods=["post"], permission_classes=[IsAuthenticated] ) @@ -1622,10 +1421,6 @@ def _remove_metadata(self, request, finding): description="Returned if there was a problem with the metadata information" ), }, - # manual_parameters=[openapi.Parameter( - # name="name", in_=openapi.IN_QUERY, type=openapi.TYPE_STRING, - # description="name of the metadata to retrieve. If name is empty, return all the \ - # metadata associated with the finding")] ) @extend_schema( methods=["PUT"], @@ -1639,9 +1434,6 @@ def _remove_metadata(self, request, finding): description="Returned if there was a problem with the metadata information" ), }, - # manual_parameters=[openapi.Parameter( - # name="name", in_=openapi.IN_QUERY, required=True, type=openapi.TYPE_STRING, - # description="name of the metadata to edit")], ) @extend_schema( methods=["POST"], @@ -1656,58 +1448,6 @@ def _remove_metadata(self, request, finding): ), }, ) - @swagger_auto_schema( - responses={ - status.HTTP_200_OK: serializers.FindingMetaSerializer(many=True), - status.HTTP_404_NOT_FOUND: "Returned if finding does not exist", - }, - methods=["get"], - ) - @swagger_auto_schema( - responses={ - status.HTTP_200_OK: "Returned if the metadata was correctly deleted", - status.HTTP_404_NOT_FOUND: "Returned if finding does not exist", - status.HTTP_400_BAD_REQUEST: "Returned if there was a problem with the metadata information", - }, - methods=["delete"], - manual_parameters=[ - openapi.Parameter( - name="name", - in_=openapi.IN_QUERY, - required=True, - type=openapi.TYPE_STRING, - description="name of the metadata to retrieve. 
If name is empty, return all the \ - metadata associated with the finding", - ) - ], - ) - @swagger_auto_schema( - responses={ - status.HTTP_200_OK: serializers.FindingMetaSerializer, - status.HTTP_404_NOT_FOUND: "Returned if finding does not exist", - status.HTTP_400_BAD_REQUEST: "Returned if there was a problem with the metadata information", - }, - methods=["put"], - manual_parameters=[ - openapi.Parameter( - name="name", - in_=openapi.IN_QUERY, - required=True, - type=openapi.TYPE_STRING, - description="name of the metadata to edit", - ) - ], - request_body=serializers.FindingMetaSerializer, - ) - @swagger_auto_schema( - responses={ - status.HTTP_200_OK: serializers.FindingMetaSerializer, - status.HTTP_404_NOT_FOUND: "Returned if finding does not exist", - status.HTTP_400_BAD_REQUEST: "Returned if there was a problem with the metadata information", - }, - methods=["post"], - request_body=serializers.FindingMetaSerializer, - ) @action( detail=True, methods=["post", "put", "delete", "get"], @@ -1759,10 +1499,7 @@ class JiraIssuesViewSet( "engagement", "finding_group", ] - swagger_schema = prefetch.get_prefetch_schema( - ["jira_finding_mappings_list", "jira_finding_mappings_read"], - serializers.JIRAIssueSerializer, - ).to_schema() + permission_classes = ( IsAuthenticated, permissions.UserHasJiraIssuePermission, @@ -1790,10 +1527,7 @@ class JiraProjectViewSet( "enable_engagement_epic_mapping", "push_notes", ] - swagger_schema = prefetch.get_prefetch_schema( - ["jira_projects_list", "jira_projects_read"], - serializers.JIRAProjectSerializer, - ).to_schema() + permission_classes = ( IsAuthenticated, permissions.UserHasJiraProductPermission, @@ -1846,13 +1580,6 @@ class ProductAPIScanConfigurationViewSet( "service_key_2", "service_key_3", ] - swagger_schema = prefetch.get_prefetch_schema( - [ - "product_api_scan_configurations_list", - "product_api_scan_configurations_read", - ], - serializers.ProductAPIScanConfigurationSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasProductAPIScanConfigurationPermission, @@ -1907,34 +1634,14 @@ class DojoMetaViewSet( IsAuthenticated, permissions.UserHasDojoMetaPermission, ) - swagger_schema = prefetch.get_prefetch_schema( - ["metadata_list", "metadata_read"], serializers.MetaSerializer - ).to_schema() + # swagger_schema = prefetch.get_prefetch_schema( + # ["metadata_list", "metadata_read"], serializers.MetaSerializer + # ).to_schema() def get_queryset(self): return get_authorized_dojo_meta(Permissions.Product_View) -# Authorization: object-based -class DjangoFilterDescriptionInspector(CoreAPICompatInspector): - def get_filter_parameters(self, filter_backend): - if isinstance(filter_backend, DjangoFilterBackend): - result = super( - DjangoFilterDescriptionInspector, self - ).get_filter_parameters(filter_backend) - for param in result: - if not param.get("description", ""): - param.description = ( - "Filter the returned list by {field_name}".format( - field_name=param.name - ) - ) - - return result - - return NotHandled - - @extend_schema_view( list=extend_schema( parameters=[ @@ -1959,12 +1666,6 @@ def get_filter_parameters(self, filter_backend): ], ), ) -@method_decorator( - name="list", - decorator=swagger_auto_schema( - filter_inspectors=[DjangoFilterDescriptionInspector] - ), -) class ProductViewSet( prefetch.PrefetchListMixin, prefetch.PrefetchRetrieveMixin, @@ -1980,9 +1681,6 @@ class ProductViewSet( filter_backends = (DjangoFilterBackend,) filterset_class = ApiProductFilter - swagger_schema = 
prefetch.get_prefetch_schema( - ["products_list", "products_read"], serializers.ProductSerializer - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasProductPermission, @@ -2011,10 +1709,6 @@ def destroy(self, request, *args, **kwargs): request=serializers.ReportGenerateOptionSerializer, responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, ) - @swagger_auto_schema( - request_body=serializers.ReportGenerateOptionSerializer, - responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, - ) @action( detail=True, methods=["post"], permission_classes=[IsAuthenticated] ) @@ -2081,10 +1775,6 @@ class ProductMemberViewSet( queryset = Product_Member.objects.none() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "product_id", "user_id"] - swagger_schema = prefetch.get_prefetch_schema( - ["product_members_list", "product_members_read"], - serializers.ProductMemberSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasProductMemberPermission, @@ -2098,9 +1788,6 @@ def get_queryset(self): @extend_schema( exclude=True ) - @swagger_auto_schema( - auto_schema=None - ) def partial_update(self, request, pk=None): # Object authorization won't work if not all data is provided response = {"message": "Patch function is not offered in this path."} @@ -2139,10 +1826,6 @@ class ProductGroupViewSet( queryset = Product_Group.objects.none() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "product_id", "group_id"] - swagger_schema = prefetch.get_prefetch_schema( - ["product_groups_list", "product_groups_read"], - serializers.ProductGroupSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasProductGroupPermission, @@ -2156,9 +1839,6 @@ def get_queryset(self): @extend_schema( exclude=True ) - @swagger_auto_schema( - auto_schema=None - ) def partial_update(self, request, pk=None): # Object authorization won't work if not all data is provided response = {"message": "Patch function is not offered in this path."} @@ -2204,10 +1884,6 @@ class ProductTypeViewSet( "created", "updated", ] - swagger_schema = prefetch.get_prefetch_schema( - ["product_types_list", "product_types_read"], - serializers.ProductTypeSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasProductTypePermission, @@ -2243,10 +1919,6 @@ def destroy(self, request, *args, **kwargs): request=serializers.ReportGenerateOptionSerializer, responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, ) - @swagger_auto_schema( - request_body=serializers.ReportGenerateOptionSerializer, - responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, - ) @action( detail=True, methods=["post"], permission_classes=[IsAuthenticated] ) @@ -2313,10 +1985,6 @@ class ProductTypeMemberViewSet( queryset = Product_Type_Member.objects.none() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "product_type_id", "user_id"] - swagger_schema = prefetch.get_prefetch_schema( - ["product_type_members_list", "product_type_members_read"], - serializers.ProductTypeMemberSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasProductTypeMemberPermission, @@ -2344,9 +2012,6 @@ def destroy(self, request, *args, **kwargs): @extend_schema( exclude=True ) - @swagger_auto_schema( - auto_schema=None - ) def partial_update(self, request, pk=None): # Object authorization won't work if not all data is provided response = {"message": "Patch function is not 
offered in this path."} @@ -2385,10 +2050,6 @@ class ProductTypeGroupViewSet( queryset = Product_Type_Group.objects.none() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "product_type_id", "group_id"] - swagger_schema = prefetch.get_prefetch_schema( - ["product_type_groups_list", "product_type_groups_read"], - serializers.ProductTypeGroupSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasProductTypeGroupPermission, @@ -2402,9 +2063,6 @@ def get_queryset(self): @extend_schema( exclude=True ) - @swagger_auto_schema( - auto_schema=None - ) def partial_update(self, request, pk=None): # Object authorization won't work if not all data is provided response = {"message": "Patch function is not offered in this path."} @@ -2419,10 +2077,6 @@ class StubFindingsViewSet( queryset = Stub_Finding.objects.none() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "title", "date", "severity", "description"] - swagger_schema = prefetch.get_prefetch_schema( - ["stub_findings_list", "stub_findings_read"], - serializers.StubFindingSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasFindingPermission, @@ -2459,9 +2113,6 @@ class TestsViewSet( queryset = Test.objects.none() filter_backends = (DjangoFilterBackend,) filterset_class = ApiTestFilter - swagger_schema = prefetch.get_prefetch_schema( - ["tests_list", "tests_read"], serializers.TestSerializer - ).to_schema() permission_classes = (IsAuthenticated, permissions.UserHasTestPermission) @property @@ -2496,10 +2147,6 @@ def get_serializer_class(self): request=serializers.ReportGenerateOptionSerializer, responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, ) - @swagger_auto_schema( - request_body=serializers.ReportGenerateOptionSerializer, - responses={status.HTTP_200_OK: serializers.ReportGenerateSerializer}, - ) @action( detail=True, methods=["post"], permission_classes=[IsAuthenticated] ) @@ -2542,15 +2189,6 @@ def generate_report(self, request, pk=None): request=serializers.AddNewNoteOptionSerializer, responses={status.HTTP_201_CREATED: serializers.NoteSerializer}, ) - @swagger_auto_schema( - method="get", - responses={status.HTTP_200_OK: serializers.TestToNotesSerializer}, - ) - @swagger_auto_schema( - methods=["post"], - request_body=serializers.AddNewNoteOptionSerializer, - responses={status.HTTP_201_CREATED: serializers.NoteSerializer}, - ) @action(detail=True, methods=["get", "post"]) def notes(self, request, pk=None): test = self.get_object() @@ -2599,15 +2237,6 @@ def notes(self, request, pk=None): request=serializers.AddNewFileOptionSerializer, responses={status.HTTP_201_CREATED: serializers.FileSerializer}, ) - @swagger_auto_schema( - method="get", - responses={status.HTTP_200_OK: serializers.TestToFilesSerializer}, - ) - @swagger_auto_schema( - method="post", - request_body=serializers.AddNewFileOptionSerializer, - responses={status.HTTP_201_CREATED: serializers.FileSerializer}, - ) @action( detail=True, methods=["get", "post"], parser_classes=(MultiPartParser,) ) @@ -2644,12 +2273,6 @@ def files(self, request, pk=None): status.HTTP_200_OK: serializers.RawFileSerializer, }, ) - @swagger_auto_schema( - method="get", - responses={ - status.HTTP_200_OK: serializers.RawFileSerializer, - }, - ) @action( detail=True, methods=["get"], @@ -2740,10 +2363,6 @@ class TestImportViewSet( "test_import_finding_action__finding", "test_import_finding_action__created", ] - swagger_schema = prefetch.get_prefetch_schema( - ["test_imports_list", 
"test_imports_read"], - serializers.TestImportSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasTestImportPermission, @@ -2798,10 +2417,6 @@ class ToolConfigurationsViewSet( "url", "authentication_type", ] - swagger_schema = prefetch.get_prefetch_schema( - ["tool_configurations_list", "tool_configurations_read"], - serializers.ToolConfigurationSerializer, - ).to_schema() permission_classes = (permissions.UserHasConfigurationPermissionSuperuser,) @@ -2820,10 +2435,6 @@ class ToolProductSettingsViewSet( "tool_project_id", "url", ] - swagger_schema = prefetch.get_prefetch_schema( - ["tool_configurations_list", "tool_configurations_read"], - serializers.ToolConfigurationSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasToolProductSettingsPermission, @@ -2914,10 +2525,6 @@ class UserContactInfoViewSet( ): serializer_class = serializers.UserContactInfoSerializer queryset = UserContactInfo.objects.all() - swagger_schema = prefetch.get_prefetch_schema( - ["user_contact_infos_list", "user_contact_infos_read"], - serializers.UserContactInfoSerializer, - ).to_schema() filter_backends = (DjangoFilterBackend,) filterset_fields = "__all__" permission_classes = (permissions.IsSuperUser, DjangoModelPermissions) @@ -2929,10 +2536,6 @@ class UserProfileView(GenericAPIView): pagination_class = None serializer_class = serializers.UserProfileSerializer - @swagger_auto_schema( - method="get", - responses={status.HTTP_200_OK: serializers.UserProfileSerializer}, - ) @action( detail=True, methods=["get"], filter_backends=[], pagination_class=None ) @@ -3110,9 +2713,6 @@ class LanguageViewSet( queryset = Languages.objects.none() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "language", "product"] - swagger_schema = prefetch.get_prefetch_schema( - ["languages_list", "languages_read"], serializers.LanguageSerializer - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasLanguagePermission, @@ -3592,10 +3192,6 @@ class NotificationsViewSet( filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "user", "product", "template"] permission_classes = (permissions.IsSuperUser, DjangoModelPermissions) - swagger_schema = prefetch.get_prefetch_schema( - ["notifications_list", "notifications_read"], - serializers.NotificationsSerializer, - ).to_schema() class EngagementPresetsViewset( @@ -3605,10 +3201,6 @@ class EngagementPresetsViewset( queryset = Engagement_Presets.objects.none() filter_backends = (DjangoFilterBackend,) filterset_fields = ["id", "title", "product"] - swagger_schema = prefetch.get_prefetch_schema( - ["engagement_presets_list", "engagement_presets_read"], - serializers.EngagementPresetsSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasEngagementPresetPermission, @@ -3624,10 +3216,6 @@ class EngagementCheckListViewset( serializer_class = serializers.EngagementCheckListSerializer queryset = Check_List.objects.none() filter_backends = (DjangoFilterBackend,) - swagger_schema = prefetch.get_prefetch_schema( - ["engagement_checklists_list", "engagement_checklists_read"], - serializers.EngagementCheckListSerializer, - ).to_schema() permission_classes = ( IsAuthenticated, permissions.UserHasEngagementPermission, @@ -3731,13 +3319,6 @@ class QuestionnaireAnsweredSurveyViewSet( permissions.UserHasEngagementPermission, DjangoModelPermissions, ) - swagger_schema = prefetch.get_prefetch_schema( - [ - "questionnaire_answered_questionnaires_list", - 
"questionnaire_answered_questionnaires_read", - ], - serializers.QuestionnaireAnsweredSurveySerializer, - ).to_schema() # Authorization: configuration diff --git a/dojo/risk_acceptance/api.py b/dojo/risk_acceptance/api.py index d862453a2fc..b23a0d1dfc6 100644 --- a/dojo/risk_acceptance/api.py +++ b/dojo/risk_acceptance/api.py @@ -7,7 +7,6 @@ from rest_framework.decorators import action from rest_framework.permissions import IsAdminUser from rest_framework.response import Response -from drf_yasg.utils import swagger_auto_schema from dojo.api_v2.serializers import RiskAcceptanceSerializer from dojo.models import Risk_Acceptance, User, Vulnerability_Id @@ -38,10 +37,6 @@ class AcceptedRisksMixin(ABC): def risk_application_model_class(self): pass - @swagger_auto_schema( - request_body=AcceptedRiskSerializer(many=True), - responses={status.HTTP_201_CREATED: RiskAcceptanceSerializer(many=True)}, - ) @extend_schema( request=AcceptedRiskSerializer(many=True), responses={status.HTTP_201_CREATED: RiskAcceptanceSerializer(many=True)}, @@ -65,10 +60,6 @@ def accept_risks(self, request, pk=None): class AcceptedFindingsMixin(ABC): - @swagger_auto_schema( - request_body=AcceptedRiskSerializer(many=True), - responses={status.HTTP_201_CREATED: RiskAcceptanceSerializer(many=True)}, - ) @extend_schema( request=AcceptedRiskSerializer(many=True), responses={status.HTTP_201_CREATED: RiskAcceptanceSerializer(many=True)}, diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 5b772a6daef..5c86f79ddfd 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -753,29 +753,6 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param if API_TOKENS_ENABLED: REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] += ('rest_framework.authentication.TokenAuthentication',) -SWAGGER_SETTINGS = { - 'SECURITY_DEFINITIONS': { - 'basicAuth': { - 'type': 'basic' - }, - 'cookieAuth': { - 'type': 'apiKey', - 'in': 'cookie', - 'name': 'sessionid' - }, - }, - 'DOC_EXPANSION': "none", - 'JSON_EDITOR': True, - 'SHOW_REQUEST_HEADERS': True, -} - -if API_TOKENS_ENABLED: - SWAGGER_SETTINGS['SECURITY_DEFINITIONS']['tokenAuth'] = { - 'type': 'apiKey', - 'in': 'header', - 'name': 'Authorization' - } - SPECTACULAR_SETTINGS = { 'TITLE': 'Defect Dojo API v2', 'DESCRIPTION': 'Defect Dojo - Open Source vulnerability Management made easy. 
Prefetch related parameters/responses not yet in the schema.', @@ -849,7 +826,6 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param 'dbbackup', 'django_celery_results', 'social_django', - 'drf_yasg', 'drf_spectacular', 'drf_spectacular_sidecar', # required for Django collectstatic discovery 'tagulous', @@ -1083,11 +1059,6 @@ def saml2_attrib_map_format(dict): ('dojo.remote_user.RemoteUserAuthentication',) + \ REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] - SWAGGER_SETTINGS['SECURITY_DEFINITIONS']['remoteUserAuth'] = { - 'type': 'apiKey', - 'in': 'header', - 'name': AUTH_REMOTEUSER_USERNAME_HEADER[5:].replace('_', '-') - } # ------------------------------------------------------------------------------ # CELERY # ------------------------------------------------------------------------------ @@ -1718,6 +1689,5 @@ def saml2_attrib_map_format(dict): from django.utils.deprecation import RemovedInDjango50Warning warnings.filterwarnings("ignore", category=RemovedInDjango50Warning) warnings.filterwarnings("ignore", message="invalid escape sequence.*") - warnings.filterwarnings("ignore", message="'cgi' is deprecated and slated for removal in Python 3\\.13") warnings.filterwarnings("ignore", message="DateTimeField .+ received a naive datetime .+ while time zone support is active\\.") warnings.filterwarnings("ignore", message="unclosed file .+") diff --git a/dojo/urls.py b/dojo/urls.py index fa15f977da9..4500e1e49dd 100755 --- a/dojo/urls.py +++ b/dojo/urls.py @@ -4,9 +4,6 @@ from django.contrib import admin from rest_framework.routers import DefaultRouter from rest_framework.authtoken import views as tokenviews -from rest_framework import permissions -from drf_yasg.views import get_schema_view -from drf_yasg import openapi from django.http import HttpResponse from dojo import views from dojo.api_v2.views import EndPointViewSet, EngagementViewSet, \ @@ -183,20 +180,6 @@ ) ] -schema_view = get_schema_view( - openapi.Info( - title="Defect Dojo API", - default_version='v2', - description="To use the API you need be authorized.\n\n## Deprecated - Removal in v2.30.0\n#### Please use the [OpenAPI3 version](/api/v2/oa3/swagger-ui/)", - ), - # if public=False, includes only endpoints the current user has access to - public=True, - # The API of a OpenSource project should be public accessible - permission_classes=[permissions.AllowAny], - # url pattersns specific to the API - patterns=api_v2_urls, -) - urlpatterns = [] # sometimes urlpatterns needed be added from local_settings.py before other URLs of core dojo @@ -208,9 +191,6 @@ re_path(r'^%shistory/(?P\d+)/(?P\d+)$' % get_system_setting('url_prefix'), views.action_history, name='action_history'), re_path(r'^%s' % get_system_setting('url_prefix'), include(ur)), - # drf-yasg = OpenAPI2 - re_path(r'^%sapi/v2/doc/' % get_system_setting('url_prefix'), schema_view.with_ui('swagger', cache_timeout=0), name='api_v2_schema'), - # drf-spectacular = OpenAPI3 re_path(r'^%sapi/v2/oa3/schema/' % get_system_setting('url_prefix'), SpectacularAPIView.as_view(), name='schema_oa3'), re_path(r'^%sapi/v2/oa3/swagger-ui/' % get_system_setting('url_prefix'), SpectacularSwaggerView.as_view(url=get_system_setting('url_prefix') + '/api/v2/oa3/schema/?format=json'), name='swagger-ui_oa3'), diff --git a/requirements.txt b/requirements.txt index e7821926bba..597607d7497 100644 --- a/requirements.txt +++ b/requirements.txt @@ -56,7 +56,6 @@ Python-jose==3.3.0 gitpython==3.1.41 debugpy==1.8.0 python-gitlab==4.4.0 -drf_yasg==1.21.5 cpe==1.2.1 
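
With the drf-yasg schema view removed above, the OpenAPI3 endpoints registered via drf-spectacular in dojo/urls.py are the only schema/docs URLs left. A minimal smoke check — a sketch only, assuming a locally running instance with an empty url_prefix; the host, port, and token below are illustrative placeholders, not values from this patch:

import requests  # third-party HTTP client, used here purely for illustration

BASE = "http://localhost:8080"                    # hypothetical local DefectDojo instance
HEADERS = {"Authorization": "Token <api-token>"}  # placeholder credential, not a real token

# Both OpenAPI3 routes kept in dojo/urls.py should still answer once drf-yasg is gone.
for path in ("/api/v2/oa3/schema/?format=json", "/api/v2/oa3/swagger-ui/"):
    resp = requests.get(BASE + path, headers=HEADERS, timeout=10)
    assert resp.status_code == 200, f"{path} -> {resp.status_code}"

Depending on the configured REST_FRAMEWORK permissions, the schema endpoint may or may not require the Authorization header; the Swagger UI page itself is typically reachable without it.
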
packageurl-python==0.13.4 django-crum==0.7.9 diff --git a/unittests/test_apiv2_metadata.py b/unittests/test_apiv2_metadata.py index 3e39dc2bbc0..6da260ec32e 100644 --- a/unittests/test_apiv2_metadata.py +++ b/unittests/test_apiv2_metadata.py @@ -26,7 +26,7 @@ def create(self, **kwargs): return self.client.post(reverse('metadata-list'), kwargs, format='json') def test_docs(self): - r = self.client.get(reverse('api_v2_schema')) + r = self.client.get(reverse('swagger-ui_oa3')) self.assertEqual(r.status_code, 200) def test_query_metadata(self): diff --git a/unittests/test_swagger_schema.py b/unittests/test_swagger_schema.py deleted file mode 100644 index b1263359374..00000000000 --- a/unittests/test_swagger_schema.py +++ /dev/null @@ -1,835 +0,0 @@ -from django.test import tag -from rest_framework.test import APIRequestFactory -from rest_framework.views import APIView -from rest_framework.test import APITestCase, force_authenticate, APIClient -from rest_framework.mixins import \ - RetrieveModelMixin, ListModelMixin, CreateModelMixin, UpdateModelMixin -from rest_framework import status -from drf_yasg.generators import OpenAPISchemaGenerator -from drf_yasg.openapi import Info, SchemaRef -from drf_yasg.openapi import \ - TYPE_ARRAY, TYPE_BOOLEAN, TYPE_INTEGER, TYPE_NUMBER, TYPE_OBJECT, TYPE_STRING -from collections import OrderedDict - -from dojo.api_v2.views import \ - DevelopmentEnvironmentViewSet, EndpointStatusViewSet, EndPointViewSet, \ - EngagementViewSet, FindingTemplatesViewSet, FindingViewSet, \ - JiraInstanceViewSet, DojoMetaViewSet, NoteTypeViewSet, NotesViewSet, \ - ProductTypeViewSet, ProductViewSet, RegulationsViewSet, \ - SonarqubeIssueViewSet, ProductAPIScanConfigurationViewSet, \ - SonarqubeIssueTransitionViewSet, StubFindingsViewSet, SystemSettingsViewSet, \ - TestTypesViewSet, TestsViewSet, ToolConfigurationsViewSet, ToolProductSettingsViewSet, \ - ToolTypesViewSet, UsersViewSet, JiraIssuesViewSet, JiraProjectViewSet, AppAnalysisViewSet, \ - LanguageTypeViewSet, LanguageViewSet, AnnouncementViewSet - -from dojo.models import \ - Development_Environment, Endpoint_Status, Endpoint, Engagement, Finding_Template, \ - Finding, JIRA_Instance, JIRA_Issue, DojoMeta, Note_Type, Notes, Product_Type, Product, Regulation, \ - Sonarqube_Issue, Product_API_Scan_Configuration, Sonarqube_Issue_Transition, \ - Stub_Finding, System_Settings, Test_Type, Test, Tool_Configuration, Tool_Product_Settings, \ - Tool_Type, Dojo_User, JIRA_Project, App_Analysis, Language_Type, Languages, Announcement - -from dojo.api_v2.serializers import \ - DevelopmentEnvironmentSerializer, EndpointStatusSerializer, EndpointSerializer, \ - EngagementSerializer, FindingTemplateSerializer, FindingSerializer, \ - JIRAInstanceSerializer, JIRAIssueSerializer, JIRAProjectSerializer, MetaSerializer, NoteTypeSerializer, \ - ProductSerializer, RegulationSerializer, \ - SonarqubeIssueSerializer, ProductAPIScanConfigurationSerializer, SonarqubeIssueTransitionSerializer, \ - StubFindingSerializer, SystemSettingsSerializer, TestTypeSerializer, TestSerializer, ToolConfigurationSerializer, \ - ToolProductSettingsSerializer, ToolTypeSerializer, UserSerializer, NoteSerializer, ProductTypeSerializer, \ - AppAnalysisSerializer, LanguageTypeSerializer, LanguageSerializer, AnnouncementSerializer - -SWAGGER_SCHEMA_GENERATOR = OpenAPISchemaGenerator(Info("defectdojo", "v2")) -BASE_API_URL = "/api/v2" - - -def testIsBroken(method): - return tag("broken")(method) - - -def skipIfNotSubclass(baseclass): - def decorate(f): - def wrapper(self, 
*args, **kwargs): - if not issubclass(self.viewset, baseclass): - self.skipTest('This view is not %s' % baseclass) - else: - f(self, *args, **kwargs) - return wrapper - return decorate - - -def check_response_valid(expected_code, response): - def _data_to_str(response): - if hasattr(response, "data"): - return response.data - return None - - assert response.status_code == expected_code, \ - f"Response invalid, returned with code {response.status_code}\nResponse Data:\n{_data_to_str(response)}" - - -def format_url(path): - return f"{BASE_API_URL}{path}" - - -class SchemaChecker(): - def __init__(self, definitions): - self._prefix = [] - self._has_failed = False - self._definitions = definitions - self._errors = [] - - def _register_error(self, error): - self._errors += [error] - - def _check_or_fail(self, condition, message): - if not condition: - self._has_failed = True - self._register_error(message) - - def _get_prefix(self): - return '#'.join(self._prefix) - - def _push_prefix(self, prefix): - self._prefix += [prefix] - - def _pop_prefix(self): - self._prefix = self._prefix if len(self._prefix) == 0 else self._prefix[:-1] - - def _resolve_if_ref(self, schema): - if type(schema) is not SchemaRef: - return schema - - ref_name = schema["$ref"] - ref_name = ref_name[ref_name.rfind("/") + 1:] - return self._definitions[ref_name] - - def _check_has_required_fields(self, required_fields, obj): - for required_field in required_fields: - # passwords are writeOnly, but this is not supported by Swagger / OpenAPIv2 - if required_field != 'password': - field = f"{self._get_prefix()}#{required_field}" - self._check_or_fail(obj is not None and required_field in obj, f"{field} is required but was not returned") - - def _check_type(self, schema, obj): - schema_type = schema["type"] - is_nullable = schema.get("x-nullable", False) or schema.get("readOnly", False) - - def _check_helper(check): - self._check_or_fail(check, f"{self._get_prefix()} should be of type {schema_type} but value was of type {type(obj)}") - - if obj is None: - self._check_or_fail(is_nullable, f"{self._get_prefix()} is not nullable yet the value returned was null") - elif schema_type is TYPE_BOOLEAN: - _check_helper(isinstance(obj, bool)) - elif schema_type is TYPE_INTEGER: - _check_helper(isinstance(obj, int)) - elif schema_type is TYPE_NUMBER: - _check_helper(obj.isdecimal()) - elif schema_type is TYPE_ARRAY: - _check_helper(isinstance(obj, list)) - elif schema_type is TYPE_OBJECT: - _check_helper(isinstance(obj, OrderedDict) or isinstance(obj, dict)) - elif schema_type is TYPE_STRING: - _check_helper(isinstance(obj, str)) - else: - # Default case - _check_helper(False) - - def _with_prefix(self, prefix, callable, *args): - self._push_prefix(prefix) - callable(*args) - self._pop_prefix() - - def check(self, schema, obj): - def _check(schema, obj): - schema = self._resolve_if_ref(schema) - self._check_type(schema, obj) - - required_fields = schema.get("required", []) - self._check_has_required_fields(required_fields, obj) - - if obj is None: - return - - properties = schema.get("properties", None) - if properties is not None: - for name, prop in properties.items(): - # print('property: ', name) - # print('obj ', obj) - obj_child = obj.get(name, None) - if obj_child is not None: - self._with_prefix(name, _check, prop, obj_child) - - for child_name in obj.keys(): - # TODO prefetch mixins not picked up by spectcular? 
- if child_name not in ['prefetch']: - if not properties or child_name not in properties.keys(): - self._has_failed = True - self._register_error(f'unexpected property "{child_name}" found') - - additional_properties = schema.get("additionalProperties", None) - if additional_properties is not None: - for name, obj_child in obj.items(): - self._with_prefix(f"additionalProp<{name}>", _check, additional_properties, obj_child) - - if schema["type"] is TYPE_ARRAY: - items_schema = schema["items"] - for index in range(len(obj)): - self._with_prefix(f"item{index}", _check, items_schema, obj[index]) - - self._has_failed = False - self._errors = [] - self._prefix = [] - _check(schema, obj) - assert not self._has_failed, "\n" + '\n'.join(self._errors) + "\nFailed with " + str(len(self._errors)) + " errors" - - -class BaseClass(): - class SchemaTest(APITestCase): - fixtures = ['dojo_testdata.json'] - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewset = None - self.viewname = None - self.model = None - self.serializer = None - self.field_transformers = dict() - - def setUp(self): - super().setUp() - testuser = Dojo_User.objects.get(username='admin') - - factory = APIRequestFactory() - request = factory.get('/') - force_authenticate(request, user=testuser) - request = APIView().initialize_request(request) - - self.schema = SWAGGER_SCHEMA_GENERATOR.get_schema(request, public=True) - self.client = APIClient() - self.client.force_authenticate(user=testuser) - - def check_schema(self, schema, obj): - schema_checker = SchemaChecker(self.schema["definitions"]) - # print(vars(schema_checker)) - schema_checker.check(schema, obj) - - def get_valid_object_id(self): - response = self.client.get(format_url(f"/{self.viewname}/")) - check_response_valid(status.HTTP_200_OK, response) - if len(response.data["results"]) == 0: - return None - - return response.data["results"][0].get('id', None) - - def get_endpoint_schema(self, path, method): - paths = self.schema["paths"] - methods = paths.get(path, None) - assert methods is not None, f"{path} not found in {[path for path in paths.keys()]}" - - endpoint = methods.get(method, None) - assert endpoint is not None, f"Method {method} not found in {[method for method in methods.keys()]}" - - return endpoint - - def construct_response_data(self, obj_id): - obj = self.model.objects.get(id=obj_id) - request = APIView().initialize_request(APIRequestFactory().request()) - serialized_obj = self.serializer(context={"request": request}).to_representation(obj) - - for name, transformer in self.field_transformers.items(): - serialized_obj[name] = transformer(serialized_obj[name]) - - return serialized_obj - - @skipIfNotSubclass(ListModelMixin) - def test_list_endpoint(self, extra_args=None): - endpoints = self.schema["paths"][f"/{self.viewname}/"] - response = self.client.get(format_url(f"/{self.viewname}/"), extra_args) - check_response_valid(status.HTTP_200_OK, response) - - schema = endpoints['get']['responses']['200']['schema'] - obj = response.data - - self.check_schema(schema, obj) - - @skipIfNotSubclass(RetrieveModelMixin) - def test_retrieve_endpoint(self, extra_args=None): - endpoints = self.schema["paths"][f"/{self.viewname}/{{id}}/"] - response = self.client.get(format_url(f"/{self.viewname}/")) - check_response_valid(status.HTTP_200_OK, response) - ids = [obj['id'] for obj in response.data["results"]] - - schema = endpoints['get']['responses']['200']['schema'] - for id in ids: - print('id:', id) - response = 
self.client.get(format_url(f"/{self.viewname}/{id}/"), extra_args) - print('response type:', type(response)) - print('response data:', response.data) - check_response_valid(status.HTTP_200_OK, response) - obj = response.data - self.check_schema(schema, obj) - - @skipIfNotSubclass(UpdateModelMixin) - def test_patch_endpoint(self, extra_args=None): - operation = self.schema["paths"][f"/{self.viewname}/{{id}}/"]["patch"] - - id = self.get_valid_object_id() - if id is None: - self.skipTest("No data exists to test endpoint") - - data = self.construct_response_data(id) - - schema = operation['responses']['200']['schema'] - response = self.client.patch(format_url(f"/{self.viewname}/{id}/"), data, format='json') - check_response_valid(status.HTTP_200_OK, response) - - obj = response.data - self.check_schema(schema, obj) - - @skipIfNotSubclass(UpdateModelMixin) - def test_put_endpoint(self, extra_data={}, extra_args=None): - operation = self.schema["paths"][f"/{self.viewname}/{{id}}/"]['put'] - - id = self.get_valid_object_id() - if id is None: - self.skipTest("No data exists to test endpoint") - - data = self.construct_response_data(id) - data.update(extra_data) - - schema = operation['responses']['200']['schema'] - response = self.client.put(format_url(f"/{self.viewname}/{id}/"), data, format='json') - check_response_valid(status.HTTP_200_OK, response) - - obj = response.data - self.check_schema(schema, obj) - - @skipIfNotSubclass(CreateModelMixin) - def test_post_endpoint(self, extra_data=[], extra_args=None): - operation = self.schema["paths"][f"/{self.viewname}/"]["post"] - - id = self.get_valid_object_id() - if id is None: - self.skipTest("No data exists to test endpoint") - - data = self.construct_response_data(id) - data.update(extra_data) - - print('data:', data) - - schema = operation['responses']['201']['schema'] - response = self.client.post(format_url(f"/{self.viewname}/"), data, format='json') - check_response_valid(status.HTTP_201_CREATED, response) - - print('response.data:', response.data) - - obj = response.data - self.check_schema(schema, obj) - - -class DevelopmentEnvironmentTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "development_environments" - self.viewset = DevelopmentEnvironmentViewSet - self.model = Development_Environment - self.serializer = DevelopmentEnvironmentSerializer - - -# Test will only work when FEATURE_AUTHENTICATION_V2 is the default -# class DojoGroupTest(BaseClass.SchemaTest): -# def __init__(self, *args, **kwargs): -# super().__init__(*args, **kwargs) -# self.viewname = "group" -# self.viewset = DojoGroupViewSet -# self.model = Dojo_Group -# self.serializer = DojoGroupSerializer - - -class EndpointStatusTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "endpoint_status" - self.viewset = EndpointStatusViewSet - self.model = Endpoint_Status - self.serializer = EndpointStatusSerializer - - # We can not simulate creating of the endpoint-finding relation with the same parameters as existing one. 
We will use another finding for this case - def test_post_endpoint(self): - super().test_post_endpoint(extra_data={"finding": "3"}) - - -class EndpointTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "endpoints" - self.viewset = EndPointViewSet - self.model = Endpoint - self.serializer = EndpointSerializer - self.field_transformers = { - "path": lambda v: (v if v else '') + "transformed/" - } - - -class EngagementTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "engagements" - self.viewset = EngagementViewSet - self.model = Engagement - self.serializer = EngagementSerializer - - # @testIsBroken - # fixed - def test_accept_risks(self): - operation = self.get_endpoint_schema("/engagements/{id}/accept_risks/", "post") - schema = operation['responses']['201']['schema'] - print(schema) - id = self.get_valid_object_id() - if id is None: - self.skipTest("No data exists to test endpoint") - - data = [ - { - "vulnerability_id": 1, - "justification": "test", - "accepted_by": "2" - } - ] - - response = self.client.post(format_url(f"/engagements/{id}/accept_risks/"), data, format='json') - check_response_valid(201, response) - print('response.data') - # print(vars(response)) - print(response.content) - obj = response.data - self.check_schema(schema, obj) - - # fixed - def test_notes_read(self): - operation = self.get_endpoint_schema("/engagements/{id}/notes/", "get") - schema = operation['responses']['200']['schema'] - id = self.get_valid_object_id() - if id is None: - self.skipTest("No data exists to test endpoint") - - response = self.client.get(format_url(f"/engagements/{id}/notes/")) - check_response_valid(200, response) - obj = response.data - self.check_schema(schema, obj) - - # fixed - def test_notes_create(self): - operation = self.get_endpoint_schema("/engagements/{id}/notes/", "post") - schema = operation['responses']['201']['schema'] - id = self.get_valid_object_id() - if id is None: - self.skipTest("No data exists to test endpoint") - - data = { - "entry": "test", - "author": 2, - } - - response = self.client.post(format_url(f"/engagements/{id}/notes/"), data, format='json') - check_response_valid(201, response) - obj = response.data - self.check_schema(schema, obj) - - -class FindingTemplateTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "finding_templates" - self.viewset = FindingTemplatesViewSet - self.model = Finding_Template - self.serializer = FindingTemplateSerializer - - # fixed - def test_post_endpoint(self): - super().test_post_endpoint() - - # fixed - def test_patch_endpoint(self): - super().test_patch_endpoint() - - # fixed - def test_put_endpoint(self): - super().test_put_endpoint() - - -class FindingTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "findings" - self.viewset = FindingViewSet - self.model = Finding - self.serializer = FindingSerializer - - # fixed - def test_list_endpoint(self): - super().test_list_endpoint({ - "related_fields": True - }) - - # fixed - def test_patch_endpoint(self): - super().test_patch_endpoint() - - # fixed - def test_put_endpoint(self): - super().test_put_endpoint() - - # fixed - def test_retrieve_endpoint(self): - super().test_retrieve_endpoint({ - "related_fields": True - }) - - -class JiraInstanceTest(BaseClass.SchemaTest): - def __init__(self, *args, 
**kwargs): - super().__init__(*args, **kwargs) - self.viewname = "jira_instances" - self.viewset = JiraInstanceViewSet - self.model = JIRA_Instance - self.serializer = JIRAInstanceSerializer - - # fixed - def test_list_endpoint(self): - super().test_list_endpoint() - - # fixed - def test_patch_endpoint(self): - super().test_patch_endpoint() - - # fixed - def test_put_endpoint(self): - super().test_put_endpoint(extra_data={"password": "12345"}) - - # fixed - def test_retrieve_endpoint(self): - super().test_retrieve_endpoint() - - # fixed - def test_post_endpoint(self): - super().test_post_endpoint(extra_data={"password": "12345"}) - - -class JiraFindingMappingsTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "jira_finding_mappings" - self.viewset = JiraIssuesViewSet - self.model = JIRA_Issue - self.serializer = JIRAIssueSerializer - self.field_transformers = { - "finding": lambda v: 3, - } - - -class JiraProjectTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "jira_projects" - self.viewset = JiraProjectViewSet - self.model = JIRA_Project - self.serializer = JIRAProjectSerializer - - -class MetadataTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "metadata" - self.viewset = DojoMetaViewSet - self.model = DojoMeta - self.serializer = MetaSerializer - - -class NoteTypeTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "note_type" - self.viewset = NoteTypeViewSet - self.model = Note_Type - self.serializer = NoteTypeSerializer - self.field_transformers = { - "name": lambda v: v + "_new" - } - - -class NoteTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "notes" - self.viewset = NotesViewSet - self.model = Notes - self.serializer = NoteSerializer - - -class ProductTypeTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "product_types" - self.viewset = ProductTypeViewSet - self.model = Product_Type - self.serializer = ProductTypeSerializer - self.field_transformers = { - "name": lambda v: v + "_new" - } - - -# Test will only work when FEATURE_AUTHENTICATION_V2 is the default -# class ProductTypeMemberTest(BaseClass.SchemaTest): -# def __init__(self, *args, **kwargs): -# super().__init__(*args, **kwargs) -# self.viewname = "product_type_members" -# self.viewset = ProductTypeMemberViewSet -# self.model = Product_Type_Member -# self.serializer = ProductTypeMemberSerializer - - -# Test will only work when FEATURE_AUTHENTICATION_V2 is the default -# class ProductTypeGroupTest(BaseClass.SchemaTest): -# def __init__(self, *args, **kwargs): -# super().__init__(*args, **kwargs) -# self.viewname = "product_type_groups" -# self.viewset = ProductTypeGroupViewSet -# self.model = Product_Type_Group -# self.serializer = ProductTypeGroupSerializer - - -class ProductTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "products" - self.viewset = ProductViewSet - self.model = Product - self.serializer = ProductSerializer - self.field_transformers = { - "name": lambda v: v + "_new" - } - - # fixed - def test_list_endpoint(self): - super().test_list_endpoint() - - # fixed - def test_patch_endpoint(self): - 
super().test_patch_endpoint() - - # fixed - def test_put_endpoint(self): - super().test_put_endpoint() - - # fixed - def test_retrieve_endpoint(self): - super().test_retrieve_endpoint() - - # fixed - def test_post_endpoint(self): - super().test_post_endpoint() - - -# Test will only work when FEATURE_AUTHENTICATION_V2 is the default -# class ProductMemberTest(BaseClass.SchemaTest): -# def __init__(self, *args, **kwargs): -# super().__init__(*args, **kwargs) -# self.viewname = "product_members" -# self.viewset = ProductMemberViewSet -# self.model = Product_Member -# self.serializer = ProductMemberSerializer - -# @testIsBroken -# def test_post_endpoint(self): -# super().test_post_endpoint() - -# @testIsBroken -# def test_patch_endpoint(self): -# super().test_post_endpoint() - - -# Test will only work when FEATURE_AUTHENTICATION_V2 is the default -# class ProductGroupTest(BaseClass.SchemaTest): -# def __init__(self, *args, **kwargs): -# super().__init__(*args, **kwargs) -# self.viewname = "product_groups" -# self.viewset = ProductGroupViewSet -# self.model = Product_Group -# self.serializer = ProductGroupSerializer - - -class RegulationTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "regulations" - self.viewset = RegulationsViewSet - self.model = Regulation - self.serializer = RegulationSerializer - - -class SonarqubeIssuesTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "sonarqube_issues" - self.viewset = SonarqubeIssueViewSet - self.model = Sonarqube_Issue - self.serializer = SonarqubeIssueSerializer - self.field_transformers = { - "key": lambda v: v + "_new" - } - - -class ProductAPIScanConfigurationTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "product_api_scan_configurations" - self.viewset = ProductAPIScanConfigurationViewSet - self.model = Product_API_Scan_Configuration - self.serializer = ProductAPIScanConfigurationSerializer - - -class SonarqubeTransitionTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "sonarqube_transitions" - self.viewset = SonarqubeIssueTransitionViewSet - self.model = Sonarqube_Issue_Transition - self.serializer = SonarqubeIssueTransitionSerializer - - -class StubFindingTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "stub_findings" - self.viewset = StubFindingsViewSet - self.model = Stub_Finding - self.serializer = StubFindingSerializer - - -class SystemSettingTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "system_settings" - self.viewset = SystemSettingsViewSet - self.model = System_Settings - self.serializer = SystemSettingsSerializer - - -class AppAnalysisTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "technologies" - self.viewset = AppAnalysisViewSet - self.model = App_Analysis - self.serializer = AppAnalysisSerializer - - # fixed - def test_patch_endpoint(self): - super().test_patch_endpoint() - - # fixed - def test_put_endpoint(self): - super().test_put_endpoint() - - # fixed - def test_post_endpoint(self): - super().test_post_endpoint() - - -class TestTypeTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, 
**kwargs) - self.viewname = "test_types" - self.viewset = TestTypesViewSet - self.model = Test_Type - self.serializer = TestTypeSerializer - self.field_transformers = { - "name": lambda v: v + "_new" - } - - -class TestsTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "tests" - self.viewset = TestsViewSet - self.model = Test - self.serializer = TestSerializer - - -class ToolConfigurationTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "tool_configurations" - self.viewset = ToolConfigurationsViewSet - self.model = Tool_Configuration - self.serializer = ToolConfigurationSerializer - - -class ToolProductSettingTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "tool_product_settings" - self.viewset = ToolProductSettingsViewSet - self.model = Tool_Product_Settings - self.serializer = ToolProductSettingsSerializer - - -class ToolTypeTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "tool_types" - self.viewset = ToolTypesViewSet - self.model = Tool_Type - self.serializer = ToolTypeSerializer - self.field_transformers = { - "name": lambda v: v + "_new" - } - - -class UserTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "users" - self.viewset = UsersViewSet - self.model = Dojo_User - self.serializer = UserSerializer - self.field_transformers = { - "username": lambda v: v + "_transformed" - } - - -class LanguageTypeTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "language_types" - self.viewset = LanguageTypeViewSet - self.model = Language_Type - self.serializer = LanguageTypeSerializer - - -class LanguageTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "languages" - self.viewset = LanguageViewSet - self.model = Languages - self.serializer = LanguageSerializer - - def test_post_endpoint(self): - super().test_post_endpoint(extra_data={"language": 2}) - - -class AnnouncementTest(BaseClass.SchemaTest): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.viewname = "announcements" - self.viewset = AnnouncementViewSet - self.model = Announcement - self.serializer = AnnouncementSerializer - - def test_post_endpoint(self, extra_data=[], extra_args=None): - self.skipTest('Only one Announcement can exists') From c0ac8825aba60dea049b92d951dceb4b7fa9a509 Mon Sep 17 00:00:00 2001 From: kiblik Date: Thu, 15 Feb 2024 23:47:23 +0000 Subject: [PATCH 27/34] Drop filterwarnings "unclosed file" (#9498) --- dojo/settings/settings.dist.py | 1 - 1 file changed, 1 deletion(-) diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 5c86f79ddfd..c07b48e09f0 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1690,4 +1690,3 @@ def saml2_attrib_map_format(dict): warnings.filterwarnings("ignore", category=RemovedInDjango50Warning) warnings.filterwarnings("ignore", message="invalid escape sequence.*") warnings.filterwarnings("ignore", message="DateTimeField .+ received a naive datetime .+ while time zone support is active\\.") - warnings.filterwarnings("ignore", message="unclosed file .+") From c1819a1642bbc92eef9d0a551834c583e9ffede6 Mon Sep 17 
00:00:00 2001 From: manuelsommer <47991713+manuel-sommer@users.noreply.github.com> Date: Fri, 16 Feb 2024 00:51:15 +0100 Subject: [PATCH 28/34] :bug: WFuzz: Generalize severity mapping (#9505) * :bug: fix wfuzz 301, issue 6182 * make severity mapper more robust * unittest for missing response code * update docs --- .../en/integrations/parsers/file/wfuzz.md | 12 +++---- dojo/tools/wfuzz/parser.py | 33 ++++++++++--------- .../one_finding_responsecode_missing.json | 13 ++++++++ unittests/tools/test_wfuzz_parser.py | 9 +++++ 4 files changed, 44 insertions(+), 23 deletions(-) create mode 100644 unittests/scans/wfuzz/one_finding_responsecode_missing.json diff --git a/docs/content/en/integrations/parsers/file/wfuzz.md b/docs/content/en/integrations/parsers/file/wfuzz.md index 1893c359bd2..b76c7b186eb 100644 --- a/docs/content/en/integrations/parsers/file/wfuzz.md +++ b/docs/content/en/integrations/parsers/file/wfuzz.md @@ -8,13 +8,11 @@ The return code matching are directly put in Severity as follow(this is hardcode HTTP Return Code | Severity -----------------|--------- -200 | High -302 | Low -401 | Medium -403 | Medium -404 | Medium -407 | Medium -500 | Low +missing | Low +200 - 299 | High +300 - 399 | Low +400 - 499 | Medium +>= 500 | Low ### Sample Scan Data Sample Wfuzz JSON importer scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/wfuzz). \ No newline at end of file diff --git a/dojo/tools/wfuzz/parser.py b/dojo/tools/wfuzz/parser.py index a19cd869bd8..eb6b3186694 100644 --- a/dojo/tools/wfuzz/parser.py +++ b/dojo/tools/wfuzz/parser.py @@ -10,16 +10,16 @@ class WFuzzParser(object): A class that can be used to parse the WFuzz JSON report files """ - # table to match HTTP error code and severity - SEVERITY = { - "200": "High", - "302": "Low", - "401": "Medium", - "403": "Medium", - "404": "Medium", - "407": "Medium", - "500": "Low" - } + # match HTTP error code and severity + def severity_mapper(self, input): + if 200 <= int(input) <= 299: + return "High" + elif 300 <= int(input) <= 399: + return "Low" + elif 400 <= int(input) <= 499: + return "Medium" + elif 500 <= int(input): + return "Low" def get_scan_types(self): return ["WFuzz JSON report"] @@ -32,16 +32,17 @@ def get_description_for_scan_types(self, scan_type): def get_findings(self, filename, test): data = json.load(filename) - dupes = {} for item in data: url = hyperlink.parse(item["url"]) - return_code = str(item["code"]) - severity = self.SEVERITY[return_code] + return_code = item.get("code", None) + if return_code is None: + severity = "Low" + else: + severity = self.severity_mapper(input=return_code) description = f"The URL {url.to_text()} must not be exposed\n Please review your configuration\n" - dupe_key = hashlib.sha256( - (url.to_text() + return_code).encode("utf-8") + (url.to_text() + str(return_code)).encode("utf-8") ).hexdigest() if dupe_key in dupes: @@ -68,7 +69,7 @@ def get_findings(self, filename, test): ) ] finding.unsaved_req_resp = [ - {"req": item["payload"], "resp": str(item["code"])} + {"req": item["payload"], "resp": str(return_code)} ] dupes[dupe_key] = finding return list(dupes.values()) diff --git a/unittests/scans/wfuzz/one_finding_responsecode_missing.json b/unittests/scans/wfuzz/one_finding_responsecode_missing.json new file mode 100644 index 00000000000..ca120d9d17b --- /dev/null +++ b/unittests/scans/wfuzz/one_finding_responsecode_missing.json @@ -0,0 +1,13 @@ +[ + { + "chars": 2823, + "payload": "/server-status | GET /server-status 
HTTP/1.1\nContent-Type: application/x-www-form-urlencoded\nUser-Agent: Wfuzz/3.1.0\nHost: example.com\n\n", + "lines": 0, + "location": "", + "method": "GET", + "post_data": [], + "server": "", + "url": "https://example.com/server-status", + "words": 60 + } +] \ No newline at end of file diff --git a/unittests/tools/test_wfuzz_parser.py b/unittests/tools/test_wfuzz_parser.py index ef826921f9d..05eb69eebad 100644 --- a/unittests/tools/test_wfuzz_parser.py +++ b/unittests/tools/test_wfuzz_parser.py @@ -47,3 +47,12 @@ def test_issue_7863(self): endpoint.clean() self.assertEqual(1, len(findings)) self.assertEqual("Medium", findings[0].severity) + + def test_one_finding_responsecode_missing(self): + testfile = open("unittests/scans/wfuzz/one_finding_responsecode_missing.json") + parser = WFuzzParser() + findings = parser.get_findings(testfile, Test()) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(1, len(findings)) From 89cefc7f4b6e762be1422ee06e6f0f7afc2fe2f5 Mon Sep 17 00:00:00 2001 From: kiblik Date: Thu, 15 Feb 2024 23:51:33 +0000 Subject: [PATCH 29/34] Remove useless noqa, be more specific for usefull noqa (#9510) --- dojo/__init__.py | 2 +- dojo/apps.py | 8 ++++---- tests/dedupe_test.py | 3 +-- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/dojo/__init__.py b/dojo/__init__.py index f1c39c15ed1..d258073d6a8 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -2,7 +2,7 @@ # This will make sure the app is always imported when # Django starts so that shared_task will use this app. -from .celery import app as celery_app # noqa +from .celery import app as celery_app # noqa: F401 __version__ = '2.32.0-dev' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' diff --git a/dojo/apps.py b/dojo/apps.py index 6c84a420de8..693d0328dea 100644 --- a/dojo/apps.py +++ b/dojo/apps.py @@ -71,10 +71,10 @@ def ready(self): # Load any signals here that will be ready for runtime # Importing the signals file is good enough if using the reciever decorator - import dojo.announcement.signals # noqa - import dojo.product.signals # noqa - import dojo.test.signals # noqa - import dojo.sla_config.helpers # noqa + import dojo.announcement.signals # noqa: F401 + import dojo.product.signals # noqa: F401 + import dojo.test.signals # noqa: F401 + import dojo.sla_config.helpers # noqa: F401 def get_model_fields_with_extra(model, extra_fields=()): diff --git a/tests/dedupe_test.py b/tests/dedupe_test.py index 1199159dba2..92f97f7891e 100644 --- a/tests/dedupe_test.py +++ b/tests/dedupe_test.py @@ -95,12 +95,11 @@ def test_delete_findings(self): # check that user was redirect back to url where it came from based on return_url self.assertTrue(driver.current_url.endswith('page=1')) - # -------------------------------------------------------------------------------------------------------- # Same scanner deduplication - Deduplication on engagement # Test deduplication for Bandit SAST scanner # -------------------------------------------------------------------------------------------------------- - @on_exception_html_source_logger # noqa: E301 + @on_exception_html_source_logger def test_add_path_test_suite(self): logger.debug("Same scanner deduplication - Deduplication on engagement - static. 
Creating tests...") # Create engagement From 1436db26204ac2ac389e37f478665c46cb3fcf93 Mon Sep 17 00:00:00 2001 From: manuelsommer <47991713+manuel-sommer@users.noreply.github.com> Date: Fri, 16 Feb 2024 00:54:17 +0100 Subject: [PATCH 30/34] :sparkles: add burp dastardly (#9514) * :sparkles: add burp dastardly * fix author names * fix unittest * add docs --- .../parsers/file/burp_dastardly.md | 11 + dojo/tools/burp_dastardly/__init__.py | 1 + dojo/tools/burp_dastardly/parser.py | 49 ++ dojo/tools/chefinspect/__init__.py | 2 +- dojo/tools/gcloud_artifact_scan/__init__.py | 2 +- dojo/tools/hcl_appscan/__init__.py | 2 +- dojo/tools/humble/__init__.py | 2 +- dojo/tools/kubeaudit/__init__.py | 2 +- dojo/tools/ms_defender/__init__.py | 2 +- dojo/tools/openvas/__init__.py | 2 +- dojo/tools/redhatsatellite/__init__.py | 2 +- dojo/tools/ssh_audit/__init__.py | 2 +- .../scans/burp_dastardly/many_findings.xml | 686 ++++++++++++++++++ unittests/tools/test_burp_dastardly_parser.py | 17 + 14 files changed, 773 insertions(+), 9 deletions(-) create mode 100644 docs/content/en/integrations/parsers/file/burp_dastardly.md create mode 100644 dojo/tools/burp_dastardly/__init__.py create mode 100755 dojo/tools/burp_dastardly/parser.py create mode 100644 unittests/scans/burp_dastardly/many_findings.xml create mode 100644 unittests/tools/test_burp_dastardly_parser.py diff --git a/docs/content/en/integrations/parsers/file/burp_dastardly.md b/docs/content/en/integrations/parsers/file/burp_dastardly.md new file mode 100644 index 00000000000..418ad5f7861 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/burp_dastardly.md @@ -0,0 +1,11 @@ +--- +title: "Burp Dastardly" +toc_hide: true +--- +### File Types +DefectDojo parser accepts Burp Dastardly Scans as an XML output. + +Dastardly is a free, lightweight web application security scanner for your CI/CD pipeline. It is designed specifically for web developers, and checks your application for seven security issues that are likely to interest you during software development. Dastardly is based on the same scanner as Burp Suite (Burp Scanner). + +### Sample Scan Data +Sample Burp Dastardly scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/burp_dastardly). \ No newline at end of file diff --git a/dojo/tools/burp_dastardly/__init__.py b/dojo/tools/burp_dastardly/__init__.py new file mode 100644 index 00000000000..3ad798a42b3 --- /dev/null +++ b/dojo/tools/burp_dastardly/__init__.py @@ -0,0 +1 @@ +__author__ = "manuel-sommer" diff --git a/dojo/tools/burp_dastardly/parser.py b/dojo/tools/burp_dastardly/parser.py new file mode 100755 index 00000000000..e546c83978c --- /dev/null +++ b/dojo/tools/burp_dastardly/parser.py @@ -0,0 +1,49 @@ +import logging +from defusedxml import ElementTree as etree +from dojo.models import Finding + +logger = logging.getLogger(__name__) + + +class BurpDastardlyParser(object): + + def get_scan_types(self): + return ["Burp Dastardly Scan"] + + def get_label_for_scan_types(self, scan_type): + return "Burp Dastardly Scan" + + def get_description_for_scan_types(self, scan_type): + return ( + "Import Burp Dastardly XML files." 
+ ) + + def get_findings(self, xml_output, test): + tree = etree.parse(xml_output, etree.XMLParser()) + return self.get_items(tree, test) + + def get_items(self, tree, test): + items = list() + for node in tree.findall("testsuite"): + if int(node.attrib["failures"]) != 0: + name = node.attrib["name"] + testcase = node.findall("testcase") + for case in testcase: + for fail in case.findall("failure"): + title = fail.attrib["message"] + severity = fail.attrib["type"] + description = fail.text + finding = Finding( + title=title, + url=name, + test=test, + severity=severity, + description=description, + false_p=False, + duplicate=False, + out_of_scope=False, + mitigated=None, + dynamic_finding=True, + ) + items.append(finding) + return items diff --git a/dojo/tools/chefinspect/__init__.py b/dojo/tools/chefinspect/__init__.py index 99e8e118c6a..3ad798a42b3 100644 --- a/dojo/tools/chefinspect/__init__.py +++ b/dojo/tools/chefinspect/__init__.py @@ -1 +1 @@ -__author__ = "manuel_sommer" +__author__ = "manuel-sommer" diff --git a/dojo/tools/gcloud_artifact_scan/__init__.py b/dojo/tools/gcloud_artifact_scan/__init__.py index 99e8e118c6a..3ad798a42b3 100644 --- a/dojo/tools/gcloud_artifact_scan/__init__.py +++ b/dojo/tools/gcloud_artifact_scan/__init__.py @@ -1 +1 @@ -__author__ = "manuel_sommer" +__author__ = "manuel-sommer" diff --git a/dojo/tools/hcl_appscan/__init__.py b/dojo/tools/hcl_appscan/__init__.py index 99e8e118c6a..3ad798a42b3 100644 --- a/dojo/tools/hcl_appscan/__init__.py +++ b/dojo/tools/hcl_appscan/__init__.py @@ -1 +1 @@ -__author__ = "manuel_sommer" +__author__ = "manuel-sommer" diff --git a/dojo/tools/humble/__init__.py b/dojo/tools/humble/__init__.py index 99e8e118c6a..3ad798a42b3 100644 --- a/dojo/tools/humble/__init__.py +++ b/dojo/tools/humble/__init__.py @@ -1 +1 @@ -__author__ = "manuel_sommer" +__author__ = "manuel-sommer" diff --git a/dojo/tools/kubeaudit/__init__.py b/dojo/tools/kubeaudit/__init__.py index 99e8e118c6a..3ad798a42b3 100644 --- a/dojo/tools/kubeaudit/__init__.py +++ b/dojo/tools/kubeaudit/__init__.py @@ -1 +1 @@ -__author__ = "manuel_sommer" +__author__ = "manuel-sommer" diff --git a/dojo/tools/ms_defender/__init__.py b/dojo/tools/ms_defender/__init__.py index 99e8e118c6a..3ad798a42b3 100644 --- a/dojo/tools/ms_defender/__init__.py +++ b/dojo/tools/ms_defender/__init__.py @@ -1 +1 @@ -__author__ = "manuel_sommer" +__author__ = "manuel-sommer" diff --git a/dojo/tools/openvas/__init__.py b/dojo/tools/openvas/__init__.py index 99e8e118c6a..3ad798a42b3 100644 --- a/dojo/tools/openvas/__init__.py +++ b/dojo/tools/openvas/__init__.py @@ -1 +1 @@ -__author__ = "manuel_sommer" +__author__ = "manuel-sommer" diff --git a/dojo/tools/redhatsatellite/__init__.py b/dojo/tools/redhatsatellite/__init__.py index 99e8e118c6a..3ad798a42b3 100644 --- a/dojo/tools/redhatsatellite/__init__.py +++ b/dojo/tools/redhatsatellite/__init__.py @@ -1 +1 @@ -__author__ = "manuel_sommer" +__author__ = "manuel-sommer" diff --git a/dojo/tools/ssh_audit/__init__.py b/dojo/tools/ssh_audit/__init__.py index 99e8e118c6a..3ad798a42b3 100644 --- a/dojo/tools/ssh_audit/__init__.py +++ b/dojo/tools/ssh_audit/__init__.py @@ -1 +1 @@ -__author__ = "manuel_sommer" +__author__ = "manuel-sommer" diff --git a/unittests/scans/burp_dastardly/many_findings.xml b/unittests/scans/burp_dastardly/many_findings.xml new file mode 100644 index 00000000000..f523de2166d --- /dev/null +++ b/unittests/scans/burp_dastardly/many_findings.xml @@ -0,0 +1,686 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
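
A quick usage sketch for the new parser, in the same style as the project's other parser unit tests (the fixture path is the many_findings.xml file added here; Test comes from dojo.models as in the WFuzz tests earlier in this series):

from dojo.models import Test
from dojo.tools.burp_dastardly.parser import BurpDastardlyParser

parser = BurpDastardlyParser()
with open("unittests/scans/burp_dastardly/many_findings.xml") as testfile:
    findings = parser.get_findings(testfile, Test())

# get_items() turns each failure element under a failing testsuite into a Finding,
# mapping the failure's message attribute to title, its type attribute to severity,
# and its text body to description.
for finding in findings:
    print(finding.severity, "-", finding.title)
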
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + " ' and =, should be +replaced with the corresponding HTML entities (< > etc). + +In cases where the application's functionality allows users to author content using +a restricted subset of HTML tags and attributes (for example, blog comments which +allow limited formatting and linking), it is necessary to parse the supplied HTML to +validate that it does not use any dangerous syntax; this is a non-trivial task. + + +Evidence +Request: +GET /catalog?searchTerm=QvfSPO99978%5c'%3balert(1)%2f%2f115 HTTP/2 +Host: ginandjuice.shop +Accept-Encoding: gzip, deflate, br +Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7 +Accept-Language: en-US;q=0.9,en;q=0.8 +User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.6099.199 Safari/537.36 +Connection: close +Cache-Control: max-age=0 +Cookie: session=m01R2FJYZ5xZhsAgCecobW9jKERpfDf5; AWSALB=I7YjoOSZmfadwp1KVWEh0t3OUUDp2fBN05Hv2b3PXPKyFnk1cTrwlKCLLuHwQfLiZfa02utPSwdLEmGVPeNV8ZBG+8XvlhHWlYef+ELVQB9nl1esg+wKts2aA6/G; AWSALBCORS=I7YjoOSZmfadwp1KVWEh0t3OUUDp2fBN05Hv2b3PXPKyFnk1cTrwlKCLLuHwQfLiZfa02utPSwdLEmGVPeNV8ZBG+8XvlhHWlYef+ELVQB9nl1esg+wKts2aA6/G; TrackingId=eyJ0eXBlIjoiY2xhc3MiLCJ2YWx1ZSI6InlWMXRmeXBUVnJkeWZEWDMifQ== +Upgrade-Insecure-Requests: 1 +Referer: https://ginandjuice.shop/catalog +Sec-CH-UA: ".Not/A)Brand";v="99", "Google Chrome";v="120", "Chromium";v="120" +Sec-CH-UA-Platform: Windows +Sec-CH-UA-Mobile: ?0 +Content-Length: 0 + + + +Response: +HTTP/2 200 OK +Date: Tue, 30 Jan 2024 09:17:14 GMT +Content-Type: text/html; charset=utf-8 +Content-Length: 9540 +Set-Cookie: AWSALB=ooUPTWmLTojp4gTNDd9biV2wheWfG9Ck5efAhh7jiVuPvGBZSYZnZdSFHG62EQGZY1d+VbYFQ0ml6QPXmD+rIhMALz2JcHLV+0VQ9U50pcQaucynSxHL5phl214T; Expires=Tue, 06 Feb 2024 09:17:14 GMT; Path=/ +Set-Cookie: AWSALBCORS=ooUPTWmLTojp4gTNDd9biV2wheWfG9Ck5efAhh7jiVuPvGBZSYZnZdSFHG62EQGZY1d+VbYFQ0ml6QPXmD+rIhMALz2JcHLV+0VQ9U50pcQaucynSxHL5phl214T; Expires=Tue, 06 Feb 2024 09:17:14 GMT; Path=/; SameSite=None; Secure +X-Backend: b3ea6540-e744-41e1-b964-c58601fd48a2 +X-Frame-Options: SAMEORIGIN + + + + + + + +var searchText = 'QvfSPO99978\\';alert(1)//115'; +document.getElementById('searchBar').value = searchText; +Snip + + + +References + +- Web Security Academy: Cross-site scripting (https://portswigger.net/web-security/cross-site-scripting) + +- Web Security Academy: Reflected cross-site scripting (https://portswigger.net/web-security/cross-site-scripting/reflected) + +- Using Burp to Find XSS issues (https://support.portswigger.net/customer/portal/articles/1965737-Methodology_XSS.html) + + + +Vulnerability Classifications + +- CWE-79: Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting') (https://cwe.mitre.org/data/definitions/79.html) + +- CWE-80: Improper Neutralization of Script-Related HTML Tags in a Web Page (Basic XSS) (https://cwe.mitre.org/data/definitions/80.html) + +- CWE-116: Improper Encoding or Escaping of Output (https://cwe.mitre.org/data/definitions/116.html) + +- CWE-159: Failure to Sanitize Special Element (https://cwe.mitre.org/data/definitions/159.html) + +- CAPEC-591: Reflected XSS (https://capec.mitre.org/data/definitions/591.html) + + + +Reported by Dastardly: https://portswigger.net/burp/dastardly/scan-checks +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + alert(1)l00ng was submitted in the email JSON parameter. This input was echoed unmodified in the application's response. + +This proof-of-concept attack demonstrates that it is possible to inject arbitrary JavaScript into the application's response. + +The request uses a Content-type header which it is not possible to generate using a standard HTML form. Burp attempted to replace this header with a standard value, to facilitate cross-domain delivery of an exploit, but this does not appear to be possible. + +The response does not state that the content type is HTML. The issue is only directly exploitable if a browser can be made to interpret the response as HTML. No modern browser will interpret the response as HTML. However, the issue might be indirectly exploitable if a client-side script processes the response and embeds it into an HTML context. + +Issue Background +Reflected cross-site scripting vulnerabilities arise when data is copied from a request and echoed into the application's immediate response in an unsafe way. An attacker can use the vulnerability to construct a request that, if issued by another application user, will cause JavaScript code supplied by the attacker to execute within the user's browser in the context of that user's session with the application. + +The attacker-supplied code can perform a wide variety of actions, such as stealing the victim's session token or login credentials, performing arbitrary actions on the victim's behalf, and logging their keystrokes. + +Users can be induced to issue the attacker's crafted request in various ways. For example, the attacker can send a victim a link containing a malicious URL in an email or instant message. They can submit the link to popular web sites that allow content authoring, for example in blog comments. And they can create an innocuous looking web site that causes anyone viewing it to make arbitrary cross-domain requests to the vulnerable application (using either the GET or the POST method). + +The security impact of cross-site scripting vulnerabilities is dependent upon the nature of the vulnerable application, the kinds of data and functionality that it contains, and the other applications that belong to the same domain and organization. If the application is used only to display non-sensitive public content, with no authentication or access control functionality, then a cross-site scripting flaw may be considered low risk. However, if the same application resides on a domain that can access cookies for other more security-critical applications, then the vulnerability could be used to attack those other applications, and so may be considered high risk. Similarly, if the organization that owns the application is a likely target for phishing attacks, then the vulnerability could be leveraged to lend credibility to such attacks, by injecting Trojan functionality into the vulnerable application and exploiting users' trust in the organization in order to capture credentials for other applications that it owns. In many kinds of application, such as those providing online banking functionality, cross-site scripting should always be considered high risk. + + +Issue Remediation +In most situations where user-controllable data is copied into application responses, cross-site scripting +attacks can be prevented using two layers of defenses: + + + +- Input should be validated as strictly as possible on arrival, given the kind of content that +it is expected to contain. 
For example, personal names should consist of alphabetical +and a small range of typographical characters, and be relatively short; a year of birth +should consist of exactly four numerals; email addresses should match a well-defined +regular expression. Input which fails the validation should be rejected, not sanitized. + +- User input should be HTML-encoded at any point where it is copied into +application responses. All HTML metacharacters, including < > " ' and =, should be +replaced with the corresponding HTML entities (< > etc). + +In cases where the application's functionality allows users to author content using +a restricted subset of HTML tags and attributes (for example, blog comments which +allow limited formatting and linking), it is necessary to parse the supplied HTML to +validate that it does not use any dangerous syntax; this is a non-trivial task. + + +Evidence +Request: +POST /catalog/subscribe HTTP/2 +Host: ginandjuice.shop +Accept-Encoding: gzip, deflate, br +Accept: */* +Accept-Language: en-US;q=0.9,en;q=0.8 +User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.6099.199 Safari/537.36 +Connection: close +Cache-Control: max-age=0 +Cookie: AWSALB=orFJl2p99LdDAEMu9mJ41yWmkCMoidYbH60mr/z+RgCGkNaDP+KcXABho4VRQDvhnqfKOCKFrgsiL6HsuqKsh6ZhX+d89EaADJ8cQbOqOyYS3VFn+RdHvKu6173H; AWSALBCORS=orFJl2p99LdDAEMu9mJ41yWmkCMoidYbH60mr/z+RgCGkNaDP+KcXABho4VRQDvhnqfKOCKFrgsiL6HsuqKsh6ZhX+d89EaADJ8cQbOqOyYS3VFn+RdHvKu6173H; session=Ht6HJxIWa8ufROU7GCp9WlqlAOqA0wHr +Origin: https://ginandjuice.shop +Referer: https://ginandjuice.shop/ +Content-Type: application/json;charset=UTF-8 +Sec-CH-UA: ".Not/A)Brand";v="99", "Google Chrome";v="120", "Chromium";v="120" +Sec-CH-UA-Platform: Windows +Sec-CH-UA-Mobile: ?0 +Content-Length: 83 + +{"email":"wPtamgNW@burpcollaborator.netl8diil00ng"} + + + +References + +- Web Security Academy: Cross-site scripting (https://portswigger.net/web-security/cross-site-scripting) + +- Web Security Academy: Reflected cross-site scripting (https://portswigger.net/web-security/cross-site-scripting/reflected) + +- Using Burp to Find XSS issues (https://support.portswigger.net/customer/portal/articles/1965737-Methodology_XSS.html) + + + +Vulnerability Classifications + +- CWE-79: Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting') (https://cwe.mitre.org/data/definitions/79.html) + +- CWE-80: Improper Neutralization of Script-Related HTML Tags in a Web Page (Basic XSS) (https://cwe.mitre.org/data/definitions/80.html) + +- CWE-116: Improper Encoding or Escaping of Output (https://cwe.mitre.org/data/definitions/116.html) + +- CWE-159: Failure to Sanitize Special Element (https://cwe.mitre.org/data/definitions/159.html) + +- CAPEC-591: Reflected XSS (https://capec.mitre.org/data/definitions/591.html) + + + +Reported by Dastardly: https://portswigger.net/burp/dastardly/scan-checks +]]> + + + + + + + + + + + + + + + + + " ' and =, should be +replaced with the corresponding HTML entities (< > etc). + +In cases where the application's functionality allows users to author content using +a restricted subset of HTML tags and attributes (for example, blog comments which +allow limited formatting and linking), it is necessary to parse the supplied HTML to +validate that it does not use any dangerous syntax; this is a non-trivial task. 
+ + +Evidence +Request: +POST /login HTTP/2 +Host: ginandjuice.shop +Accept-Encoding: gzip, deflate, br +Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7 +Accept-Language: en-US;q=0.9,en;q=0.8 +User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.6099.199 Safari/537.36 +Connection: close +Cache-Control: max-age=0 +Cookie: session=YD2NwguI62ebistVMrg3J2Y75NVvJxqo; AWSALB=WiZA+unpp+ZxZrfwCYm939q4xCPjDiZP7hF3CmI0esHdQvNJ/qOqSxyqgZ/LyeN3Zx0WwL1t6r+h9XAS9TBv3vq8w0kBTzSs8OhGM6f6s/J5NPk961Ttp5HtxEJM; AWSALBCORS=WiZA+unpp+ZxZrfwCYm939q4xCPjDiZP7hF3CmI0esHdQvNJ/qOqSxyqgZ/LyeN3Zx0WwL1t6r+h9XAS9TBv3vq8w0kBTzSs8OhGM6f6s/J5NPk961Ttp5HtxEJM +Origin: https://ginandjuice.shop +Upgrade-Insecure-Requests: 1 +Referer: https://ginandjuice.shop/login +Content-Type: application/x-www-form-urlencoded +Sec-CH-UA: ".Not/A)Brand";v="99", "Google Chrome";v="120", "Chromium";v="120" +Sec-CH-UA-Platform: Windows +Sec-CH-UA-Mobile: ?0 +Content-Length: 55 + +csrf=VkUYAv7TZRlP5KkB3rcuQ0x7ygE7Ugrh&username=DjVERldN92891'%3balert(1)%2f%2f714 + +Response: +HTTP/2 200 OK +Date: Tue, 30 Jan 2024 09:18:28 GMT +Content-Type: text/html; charset=utf-8 +Content-Length: 7823 +Set-Cookie: AWSALB=7PrleWn5wj+EGyD19r/OEMjCN1yE9Kw2OXDKkhKu3Xyiq1p9uBAV3pn7Cj63dphv0w6NGXFCYqSVusc092BsVLsklHSb1+m12Tmro7qaded5bDQ8Kdhj2Lgijr10; Expires=Tue, 06 Feb 2024 09:18:28 GMT; Path=/ +Set-Cookie: AWSALBCORS=7PrleWn5wj+EGyD19r/OEMjCN1yE9Kw2OXDKkhKu3Xyiq1p9uBAV3pn7Cj63dphv0w6NGXFCYqSVusc092BsVLsklHSb1+m12Tmro7qaded5bDQ8Kdhj2Lgijr10; Expires=Tue, 06 Feb 2024 09:18:28 GMT; Path=/; SameSite=None; Secure +X-Backend: 05285ce1-97cd-4b05-9fc6-cf93f1cac3f4 +X-Frame-Options: SAMEORIGIN + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + element + + + +- +End-of-Life: Long term support for AngularJS has been discontinued + +https://blog.angular.io/discontinued-long-term-support-for-angularjs-cc066b82e65a?gi=9d3103b5445c (https://blog.angular.io/discontinued-long-term-support-for-angularjs-cc066b82e65a?gi=9d3103b5445c) + + + +- +CVE-2022-25869 (https://nvd.nist.gov/vuln/detail/CVE-2022-25869): Angular (deprecated package) Cross-site Scripting + + + +- +CVE-2022-25844 (https://nvd.nist.gov/vuln/detail/CVE-2022-25844): angular vulnerable to regular expression denial of service (ReDoS) + + + +- +CVE-2023-26116 (https://nvd.nist.gov/vuln/detail/CVE-2023-26116): angular vulnerable to regular expression denial of service via the angular.copy() utility + + + +- +CVE-2023-26117 (https://nvd.nist.gov/vuln/detail/CVE-2023-26117): angular vulnerable to regular expression denial of service via the $resource service + + + + + + + +Issue Background + +The use of third-party JavaScript libraries can introduce a range of DOM-based vulnerabilities, including some that can be used to hijack user accounts like DOM-XSS. + + + + +Common JavaScript libraries typically enjoy the benefit of being heavily audited. This may mean that bugs are quickly identified and patched upstream, resulting in a steady stream of security updates that need to be applied. Although it may be tempting to ignore updates, using a library with missing security patches can make your website exceptionally easy to exploit. Therefore, it's important to ensure that any available security updates are applied promptly. 
+ + + +Some library vulnerabilities expose every application that imports the library, but others only affect applications that use certain library features. Accurately identifying which library vulnerabilities apply to your website can be difficult, so we recommend applying all available security updates regardless. + + + +Issue Remediation +Develop a patch-management strategy to ensure that security updates are promptly applied to all third-party libraries in your application. Also, consider reducing your attack surface by removing any libraries that are no longer in use. + + +Evidence +Request: +GET /resources/js/angular_1-7-7.js HTTP/2 +Host: ginandjuice.shop +Accept-Encoding: gzip, deflate, br +Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7 +Accept-Language: en-US;q=0.9,en;q=0.8 +User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.6099.199 Safari/537.36 +Connection: close +Cache-Control: max-age=0 +Upgrade-Insecure-Requests: 1 +Sec-CH-UA: ".Not/A)Brand";v="99", "Google Chrome";v="120", "Chromium";v="120" +Sec-CH-UA-Platform: Windows +Sec-CH-UA-Mobile: ?0 + + + +Response: +HTTP/2 200 OK +Date: Tue, 30 Jan 2024 09:13:15 GMT +Content-Type: application/javascript; charset=utf-8 +Content-Length: 195161 +Set-Cookie: AWSALB=5wcVdcDBKUZ9ywXfMQ4JayMr81/LcKlHFowhYUkNWdqd/PIqpp4w+A4liG65OtilJq2eYdZGiB+mj3FIgwDkq0reO94m/jveqdB7k6X1OPhzO3AunQhPmO2mp5fE; Expires=Tue, 06 Feb 2024 09:13:15 GMT; Path=/ +Set-Cookie: AWSALBCORS=5wcVdcDBKUZ9ywXfMQ4JayMr81/LcKlHFowhYUkNWdqd/PIqpp4w+A4liG65OtilJq2eYdZGiB+mj3FIgwDkq0reO94m/jveqdB7k6X1OPhzO3AunQhPmO2mp5fE; Expires=Tue, 06 Feb 2024 09:13:15 GMT; Path=/; SameSite=None; Secure +Cache-Control: public, max-age=3600 +X-Backend: 05285ce1-97cd-4b05-9fc6-cf93f1cac3f4 +X-Frame-Options: SAMEORIGIN + +/* +AngularJS v1.7.7 +(c) 2010-2018 Google, Inc. 
http://angularjs.org +License: MIT +*/ +(function(C){'use strict';function re(a){if(D(a))w(a.objectMaxDepth)&&(Wb.objectMaxDepth=Xb(a.objectMaxDepth)?a.objectMaxDepth:NaN),w(Snip + + + +Vulnerability Classifications + +- CWE-1104: Use of Unmaintained Third Party Components (https://cwe.mitre.org/data/definitions/1104.html) + +- A9: Using Components with Known Vulnerabilities (https://owasp.org/www-project-top-ten/2017/A9_2017-Using_Components_with_Known_Vulnerabilities) + + + +Reported by Dastardly: https://portswigger.net/burp/dastardly/scan-checks +]]> + + + + + + + + + + + + + + + + + + diff --git a/unittests/tools/test_burp_dastardly_parser.py b/unittests/tools/test_burp_dastardly_parser.py new file mode 100644 index 00000000000..9db996a5f2f --- /dev/null +++ b/unittests/tools/test_burp_dastardly_parser.py @@ -0,0 +1,17 @@ +from os import path + +from ..dojo_test_case import DojoTestCase +from dojo.models import Test +from dojo.tools.burp_dastardly.parser import BurpDastardlyParser + + +class TestBurpParser(DojoTestCase): + + def test_burp_dastardly_multiple_findings(self): + with open(path.join(path.dirname(__file__), "../scans/burp_dastardly/many_findings.xml")) as test_file: + parser = BurpDastardlyParser() + findings = parser.get_findings(test_file, Test()) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + self.assertEqual(4, len(findings)) From 96e2a4b824e45fe61437831bfccc698d8aee93bd Mon Sep 17 00:00:00 2001 From: kiblik Date: Fri, 16 Feb 2024 02:41:07 +0000 Subject: [PATCH 31/34] Remove filterwarnings for "invalid escape sequence" (#9496) * Drop filterwarnings "invalid escape sequence" * Fix SyntaxError for special_character_required * Update dojo/utils.py Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> * Update settings.dist.py Fix merge conflict fix --------- Co-authored-by: Charles Neill <1749665+cneill@users.noreply.github.com> Co-authored-by: Matt Tesauro --- dojo/models.py | 2 +- dojo/settings/settings.dist.py | 2 +- dojo/utils.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dojo/models.py b/dojo/models.py index 77dead1482e..1ed97a2b69e 100755 --- a/dojo/models.py +++ b/dojo/models.py @@ -547,7 +547,7 @@ class System_Settings(models.Model): default=True, blank=False, verbose_name=_("Password must contain one special character"), - help_text=_("Requires user passwords to contain at least one special character (()[]{}|\`~!@#$%^&*_-+=;:\'\",<>./?).")) # noqa W605 + help_text=_("Requires user passwords to contain at least one special character (()[]{}|\\`~!@#$%^&*_-+=;:\'\",<>./?).")) lowercase_character_required = models.BooleanField( default=True, blank=False, diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index c07b48e09f0..3ee4353d8ee 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1688,5 +1688,5 @@ def saml2_attrib_map_format(dict): if DEBUG: from django.utils.deprecation import RemovedInDjango50Warning warnings.filterwarnings("ignore", category=RemovedInDjango50Warning) - warnings.filterwarnings("ignore", message="invalid escape sequence.*") + warnings.filterwarnings("ignore", message="'cgi' is deprecated and slated for removal in Python 3\\.13") warnings.filterwarnings("ignore", message="DateTimeField .+ received a naive datetime .+ while time zone support is active\\.") diff --git a/dojo/utils.py b/dojo/utils.py index 42334262d94..9ff3c362124 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -2429,7 +2429,7 @@ def 
get_password_requirements_string(): if bool(get_system_setting('number_character_required')): s += ', one number (0-9)' if bool(get_system_setting('special_character_required')): - s += ', one special chacter (()[]{}|\`~!@#$%^&*_-+=;:\'\",<>./?)' # noqa W605 + s += ', one special character (()[]{}|\\`~!@#$%^&*_-+=;:\'\",<>./?)' if s.count(', ') == 1: password_requirements_string = s.rsplit(', ', 1)[0] + ' and ' + s.rsplit(', ', 1)[1] From f3b409af5af2d38d7c00a983b97f2cd555305bde Mon Sep 17 00:00:00 2001 From: manuelsommer <47991713+manuel-sommer@users.noreply.github.com> Date: Fri, 16 Feb 2024 03:43:16 +0100 Subject: [PATCH 32/34] :bug: fix mobsf deduplication and severity mapping (#9471) * :bug: fix #7936, fix severity mapping * add warning * remove multiple warning replacings * remove replacing --- dojo/settings/settings.dist.py | 2 ++ dojo/tools/mobsf/parser.py | 25 +++++++++++++------------ 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 3ee4353d8ee..c928cd7d172 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1241,6 +1241,7 @@ def saml2_attrib_map_format(dict): 'Humble Json Importer': ['title'], 'MSDefender Parser': ['title', 'description'], 'HCLAppScan XML': ['title', 'description'], + 'MobSF Scan': ['title', 'description', 'severity'], } # Override the hardcoded settings here via the env var @@ -1449,6 +1450,7 @@ def saml2_attrib_map_format(dict): 'Wazuh Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'MSDefender Parser': DEDUPE_ALGO_HASH_CODE, 'HCLAppScan XML': DEDUPE_ALGO_HASH_CODE, + 'MobSF Scan': DEDUPE_ALGO_HASH_CODE, } # Override the hardcoded settings here via the env var diff --git a/dojo/tools/mobsf/parser.py b/dojo/tools/mobsf/parser.py index da355496fd5..4bad5590098 100644 --- a/dojo/tools/mobsf/parser.py +++ b/dojo/tools/mobsf/parser.py @@ -134,7 +134,7 @@ def get_findings(self, filename, test): mobsf_item = { "category": "Certificate Analysis", "title": details[2], - "severity": details[0].replace("warning", "low").title(), + "severity": details[0].title(), "description": details[1] + "\n\n**Certificate Info:** " + certificate_info, "file_path": None } @@ -143,7 +143,7 @@ def get_findings(self, filename, test): mobsf_item = { "category": "Certificate Analysis", "title": details[1], - "severity": details[0].replace("warning", "low").title(), + "severity": details[0].title(), "description": details[1] + "\n\n**Certificate Info:** " + certificate_info, "file_path": None } @@ -159,7 +159,7 @@ def get_findings(self, filename, test): mobsf_item = { "category": "Manifest Analysis", "title": details["title"], - "severity": details["severity"].replace("warning", "low").title(), + "severity": details["severity"].title(), "description": details["description"] + "\n\n " + details["name"], "file_path": None } @@ -169,7 +169,7 @@ def get_findings(self, filename, test): mobsf_item = { "category": "Manifest Analysis", "title": details["title"], - "severity": details["stat"].replace("warning", "low").title(), + "severity": details["stat"].title(), "description": details["desc"] + "\n\n " + details["name"], "file_path": None } @@ -184,7 +184,7 @@ def get_findings(self, filename, test): mobsf_item = { "category": "Code Analysis", "title": details, - "severity": metadata["metadata"]["severity"].replace("warning", "low").title(), + "severity": metadata["metadata"]["severity"].title(), "description": metadata["metadata"]["description"], "file_path": None } @@ -196,7 +196,7 @@ def 
get_findings(self, filename, test): mobsf_item = { "category": "Code Analysis", "title": details, - "severity": metadata["metadata"]["severity"].replace("warning", "low").title(), + "severity": metadata["metadata"]["severity"].title(), "description": metadata["metadata"]["description"], "file_path": None } @@ -211,7 +211,7 @@ def get_findings(self, filename, test): mobsf_item = { "category": "Binary Analysis", "title": details[binary_analysis_type]["description"].split(".")[0], - "severity": details[binary_analysis_type]["severity"].replace("warning", "low").title(), + "severity": details[binary_analysis_type]["severity"].title(), "description": details[binary_analysis_type]["description"], "file_path": details["name"] } @@ -230,7 +230,7 @@ def get_findings(self, filename, test): mobsf_item = { "category": "Binary Analysis", "title": details["detailed_desc"], - "severity": details["severity"].replace("good", "info").title(), + "severity": details["severity"].title(), "description": details["detailed_desc"], "file_path": None } @@ -248,7 +248,7 @@ def get_findings(self, filename, test): mobsf_item = { "category": "Binary Analysis", "title": details["detailed_desc"], - "severity": details["severity"].replace("good", "info").title(), + "severity": details["severity"].title(), "description": details["detailed_desc"], "file_path": None } @@ -280,7 +280,7 @@ def get_findings(self, filename, test): mobsf_item = { "category": "Android API", "title": details["metadata"]["description"], - "severity": details["metadata"]["severity"].replace("warning", "low").title(), + "severity": details["metadata"]["severity"].title(), "description": "**API:** " + api + "\n\n**Description:** " + details["metadata"]["description"], "file_path": None } @@ -372,11 +372,12 @@ def getSeverityForPermission(self, status): # Criticality rating def getCriticalityRating(self, rating): criticality = "Info" - if rating == "warning": + if rating == "Good": criticality = "Info" + if rating == "Warning": + criticality = "Low" else: criticality = rating.capitalize() - return criticality def suite_data(self, suites): From 496ea37a543c9a93391008209213841244d5ca86 Mon Sep 17 00:00:00 2001 From: kiblik Date: Fri, 16 Feb 2024 04:39:53 +0000 Subject: [PATCH 33/34] Remove filterwarnings for "DateTimeField - timezone" (#9497) * Drop filterwarnings "DateTimeField - timezone" * Fix some * Fix of RA test + importers * Fix RA * Fix importers * Fix Flake8 --------- Co-authored-by: Matt Tesauro --- dojo/importers/importer/importer.py | 5 +++++ dojo/importers/reimporter/reimporter.py | 5 +++++ dojo/settings/settings.dist.py | 2 +- unittests/test_bulk_risk_acceptance_api.py | 22 ++++++++++---------- unittests/test_finding_helper.py | 6 +++--- unittests/test_flush_auditlog.py | 8 +++---- unittests/test_import_reimport.py | 4 ++-- unittests/test_risk_acceptance.py | 7 ++++--- unittests/test_utils_deduplication_reopen.py | 2 +- 9 files changed, 36 insertions(+), 25 deletions(-) diff --git a/dojo/importers/importer/importer.py b/dojo/importers/importer/importer.py index 4b3b1d43c6c..7552f9184ef 100644 --- a/dojo/importers/importer/importer.py +++ b/dojo/importers/importer/importer.py @@ -103,6 +103,11 @@ def process_parsed_findings(self, test, parsed_findings, scan_type, user, active # finding's severity is below the configured threshold : ignoring the finding continue + # Some parsers provide "mitigated" field but do not set timezone (because they are probably not available in the report) + # Finding.mitigated is DateTimeField and it requires 
timezone + if item.mitigated and not item.mitigated.tzinfo: + item.mitigated = item.mitigated.replace(tzinfo=now.tzinfo) + item.test = test item.reporter = user if user else get_current_user item.last_reviewed = now diff --git a/dojo/importers/reimporter/reimporter.py b/dojo/importers/reimporter/reimporter.py index 107068d11fa..d02d1dc1b17 100644 --- a/dojo/importers/reimporter/reimporter.py +++ b/dojo/importers/reimporter/reimporter.py @@ -89,6 +89,11 @@ def process_parsed_findings( item.component_version if hasattr(item, "component_version") else None ) + # Some parsers provide "mitigated" field but do not set timezone (because it is probably not available in the report) + # Finding.mitigated is DateTimeField and it requires timezone + if item.mitigated and not item.mitigated.tzinfo: + item.mitigated = item.mitigated.replace(tzinfo=now.tzinfo) + if not hasattr(item, "test"): item.test = test diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index c928cd7d172..52972291844 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1691,4 +1691,4 @@ def saml2_attrib_map_format(dict): from django.utils.deprecation import RemovedInDjango50Warning warnings.filterwarnings("ignore", category=RemovedInDjango50Warning) warnings.filterwarnings("ignore", message="'cgi' is deprecated and slated for removal in Python 3\\.13") - warnings.filterwarnings("ignore", message="DateTimeField .+ received a naive datetime .+ while time zone support is active\\.") + warnings.filterwarnings("ignore", message="unclosed file .+") diff --git a/unittests/test_bulk_risk_acceptance_api.py b/unittests/test_bulk_risk_acceptance_api.py index dafa7d321a2..f19ccb96022 100644 --- a/unittests/test_bulk_risk_acceptance_api.py +++ b/unittests/test_bulk_risk_acceptance_api.py @@ -18,25 +18,25 @@ def setUpTestData(cls): cls.product = Product.objects.create(prod_type=cls.product_type, name='Flopper', description='Test product') Product_Type_Member.objects.create(product_type=cls.product_type, user=cls.user, role=Role.objects.get(id=Roles.Owner)) cls.product_2 = Product.objects.create(prod_type=cls.product_type, name='Flopper2', description='Test product2') - cls.engagement = Engagement.objects.create(product=cls.product, target_start=datetime.date(2000, 1, 1), - target_end=datetime.date(2000, 2, 1)) - cls.engagement_2a = Engagement.objects.create(product=cls.product_2, target_start=datetime.date(2000, 1, 1), - target_end=datetime.date(2000, 2, 1)) - cls.engagement_2b = Engagement.objects.create(product=cls.product_2, target_start=datetime.date(2000, 1, 1), - target_end=datetime.date(2000, 2, 1)) + cls.engagement = Engagement.objects.create(product=cls.product, target_start=datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc), + target_end=datetime.datetime(2000, 2, 1, tzinfo=datetime.timezone.utc)) + cls.engagement_2a = Engagement.objects.create(product=cls.product_2, target_start=datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc), + target_end=datetime.datetime(2000, 2, 1, tzinfo=datetime.timezone.utc)) + cls.engagement_2b = Engagement.objects.create(product=cls.product_2, target_start=datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc), + target_end=datetime.datetime(2000, 2, 1, tzinfo=datetime.timezone.utc)) cls.test_type = Test_Type.objects.create(name='Risk Acceptance Mock Scan', static_tool=True) cls.test_a = Test.objects.create(engagement=cls.engagement, test_type=cls.test_type, - target_start=datetime.date(2000, 1, 1), 
target_end=datetime.date(2000, 2, 1)) + target_start=datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc), target_end=datetime.datetime(2000, 2, 1, tzinfo=datetime.timezone.utc)) cls.test_b = Test.objects.create(engagement=cls.engagement, test_type=cls.test_type, - target_start=datetime.date(2000, 1, 1), target_end=datetime.date(2000, 2, 1)) + target_start=datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc), target_end=datetime.datetime(2000, 2, 1, tzinfo=datetime.timezone.utc)) cls.test_c = Test.objects.create(engagement=cls.engagement, test_type=cls.test_type, - target_start=datetime.date(2000, 1, 1), target_end=datetime.date(2000, 2, 1)) + target_start=datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc), target_end=datetime.datetime(2000, 2, 1, tzinfo=datetime.timezone.utc)) cls.test_d = Test.objects.create(engagement=cls.engagement_2a, test_type=cls.test_type, - target_start=datetime.date(2000, 1, 1), target_end=datetime.date(2000, 2, 1)) + target_start=datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc), target_end=datetime.datetime(2000, 2, 1, tzinfo=datetime.timezone.utc)) cls.test_e = Test.objects.create(engagement=cls.engagement_2b, test_type=cls.test_type, - target_start=datetime.date(2000, 1, 1), target_end=datetime.date(2000, 2, 1)) + target_start=datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc), target_end=datetime.datetime(2000, 2, 1, tzinfo=datetime.timezone.utc)) def create_finding(test: Test, reporter: User, cve: str) -> Finding: return Finding(test=test, title='Finding {}'.format(cve), cve=cve, severity='High', verified=True, diff --git a/unittests/test_finding_helper.py b/unittests/test_finding_helper.py index 00f7198234b..d3e6bf71293 100644 --- a/unittests/test_finding_helper.py +++ b/unittests/test_finding_helper.py @@ -93,7 +93,7 @@ def test_mark_old_active_as_mitigated(self, mock_can_edit, mock_tz): def test_mark_old_active_as_mitigated_custom_edit(self, mock_can_edit, mock_tz): mock_tz.return_value = frozen_datetime - custom_mitigated = datetime.datetime.now() + custom_mitigated = datetime.datetime.now(datetime.timezone.utc) with impersonate(self.user_1): test = Test.objects.last() @@ -115,7 +115,7 @@ def test_mark_old_active_as_mitigated_custom_edit(self, mock_can_edit, mock_tz): def test_update_old_mitigated_with_custom_edit(self, mock_can_edit, mock_tz): mock_tz.return_value = frozen_datetime - custom_mitigated = datetime.datetime.now() + custom_mitigated = datetime.datetime.now(datetime.timezone.utc) with impersonate(self.user_1): test = Test.objects.last() @@ -137,7 +137,7 @@ def test_update_old_mitigated_with_custom_edit(self, mock_can_edit, mock_tz): def test_update_old_mitigated_with_missing_data(self, mock_can_edit, mock_tz): mock_tz.return_value = frozen_datetime - custom_mitigated = datetime.datetime.now() + custom_mitigated = datetime.datetime.now(datetime.timezone.utc) with impersonate(self.user_1): test = Test.objects.last() diff --git a/unittests/test_flush_auditlog.py b/unittests/test_flush_auditlog.py index ffaeb538baa..1cbdb4ff62d 100644 --- a/unittests/test_flush_auditlog.py +++ b/unittests/test_flush_auditlog.py @@ -2,7 +2,7 @@ from .dojo_test_case import DojoTestCase from django.test import override_settings from auditlog.models import LogEntry -from datetime import date, datetime +from datetime import date, datetime, timezone from dojo.models import Finding from dateutil.relativedelta import relativedelta import logging @@ -29,8 +29,8 @@ def test_delete_all_entries(self): 
@override_settings(AUDITLOG_FLUSH_RETENTION_PERIOD=1) def test_delete_entries_with_retention_period(self): - entries_before = LogEntry.objects.filter(timestamp__date__lt=date.today()).count() - two_weeks_ago = datetime.today() - relativedelta(weeks=2) + entries_before = LogEntry.objects.filter(timestamp__date__lt=datetime.now(timezone.utc)).count() + two_weeks_ago = datetime.now(timezone.utc) - relativedelta(weeks=2) log_entry = LogEntry.objects.log_create( instance=Finding.objects.all()[0], timestamp=two_weeks_ago, @@ -40,6 +40,6 @@ def test_delete_entries_with_retention_period(self): log_entry.timestamp = two_weeks_ago log_entry.save() flush_auditlog() - entries_after = LogEntry.objects.filter(timestamp__date__lt=date.today()).count() + entries_after = LogEntry.objects.filter(timestamp__date__lt=datetime.now(timezone.utc)).count() # we have three old log entries in our testdata and added a new one self.assertEqual(entries_before - 3 + 1, entries_after) diff --git a/unittests/test_import_reimport.py b/unittests/test_import_reimport.py index 92bcb0097d3..535bc488d19 100644 --- a/unittests/test_import_reimport.py +++ b/unittests/test_import_reimport.py @@ -1430,8 +1430,8 @@ def test_import_reimport_vulnerability_ids(self): engagement=test.engagement, test_type=test_type, scan_type=self.anchore_grype_scan_type, - target_start=datetime.datetime.now(), - target_end=datetime.datetime.now(), + target_start=datetime.datetime.now(datetime.timezone.utc), + target_end=datetime.datetime.now(datetime.timezone.utc), ) reimport_test.save() diff --git a/unittests/test_risk_acceptance.py b/unittests/test_risk_acceptance.py index e652fc132b7..e677ff4286a 100644 --- a/unittests/test_risk_acceptance.py +++ b/unittests/test_risk_acceptance.py @@ -12,6 +12,7 @@ # from unittest import skip import dojo.risk_acceptance.helper as ra_helper import logging +import datetime logger = logging.getLogger(__name__) @@ -264,9 +265,9 @@ def test_expiration_handler(self): # ra1: expire in 9 days -> warn:yes, expire:no # ra2: expire in 11 days -> warn:no, expire:no # ra3: expire 5 days ago -> warn:no, expire:yes (expiration not handled yet, so expire) - ra1.expiration_date = timezone.now().date() + relativedelta(days=heads_up_days - 1) - ra2.expiration_date = timezone.now().date() + relativedelta(days=heads_up_days + 1) - ra3.expiration_date = timezone.now().date() - relativedelta(days=5) + ra1.expiration_date = datetime.datetime.now(datetime.timezone.utc) + relativedelta(days=heads_up_days - 1) + ra2.expiration_date = datetime.datetime.now(datetime.timezone.utc) + relativedelta(days=heads_up_days + 1) + ra3.expiration_date = datetime.datetime.now(datetime.timezone.utc) - relativedelta(days=5) ra1.save() ra2.save() ra3.save() diff --git a/unittests/test_utils_deduplication_reopen.py b/unittests/test_utils_deduplication_reopen.py index 0d4e7c24d31..50ff0832e0a 100644 --- a/unittests/test_utils_deduplication_reopen.py +++ b/unittests/test_utils_deduplication_reopen.py @@ -14,7 +14,7 @@ def setUp(self): self.finding_a = Finding.objects.get(id=2) self.finding_a.pk = None self.finding_a.duplicate = False - self.finding_a.mitigated = datetime.date(1970, 1, 1) + self.finding_a.mitigated = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc) self.finding_a.is_mitigated = True self.finding_a.false_p = True self.finding_a.active = False From 72e20ea8af6ce7f1184abfaaf9cb587fbde55d1a Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 23:32:51 -0600 Subject: 
[PATCH 34/34] Update Helm release postgresql-ha from 9.4.11 to v13 (helm/defectdojo/Chart.yaml) (#9553) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- helm/defectdojo/Chart.lock | 6 +++--- helm/defectdojo/Chart.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index 784d84b4843..9f0d8587154 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -7,12 +7,12 @@ dependencies: version: 11.9.13 - name: postgresql-ha repository: https://charts.bitnami.com/bitnami - version: 9.4.11 + version: 13.2.4 - name: rabbitmq repository: https://charts.bitnami.com/bitnami version: 11.16.2 - name: redis repository: https://charts.bitnami.com/bitnami version: 16.13.2 -digest: sha256:50d07c49c1fb199a70fafd032712a1d5509a0352f090bfddd2e8a22b35be0961 -generated: "2024-02-15T20:24:24.560785941Z" +digest: sha256:e33a1f5fbe1601251363f38e2cc639074ef7b85c4d4e69d04967dfcdf5d1d70e +generated: "2024-02-16T04:41:43.560933367Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 1c44736dafb..3704d2ca133 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -18,7 +18,7 @@ dependencies: repository: "https://charts.bitnami.com/bitnami" condition: postgresql.enabled - name: postgresql-ha - version: ~9.4.0 + version: ~13.2.0 repository: "https://charts.bitnami.com/bitnami" alias: postgresqlha condition: postgresqlha.enabled
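For anyone applying the chart bump in PATCH 34 locally, a minimal sketch of the usual Helm 3 workflow follows. This is an assumption about how the committed Chart.lock was regenerated, not part of the patch itself; the repository URL and the chart path helm/defectdojo are taken from the diffs above, and a locally produced digest/timestamp will differ unless the resolved dependency versions match exactly.

    # Make the Bitnami index available (the repository declared in Chart.yaml).
    helm repo add bitnami https://charts.bitnami.com/bitnami

    # Regenerate Chart.lock against the new postgresql-ha constraint (~13.2.0);
    # this is the step that rewrites the digest: and generated: fields seen in
    # the Chart.lock hunk.
    helm dependency update helm/defectdojo

    # Sanity-check that the chart still renders with the bumped dependency.
    helm lint helm/defectdojo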