diff --git a/docs/content/en/integrations/parsers/file/acunetix.md b/docs/content/en/integrations/parsers/file/acunetix.md index 96a2c2005cc..97a2124e8ac 100644 --- a/docs/content/en/integrations/parsers/file/acunetix.md +++ b/docs/content/en/integrations/parsers/file/acunetix.md @@ -2,7 +2,7 @@ title: "Acunetix Scanner" toc_hide: true --- -XML format +This parser imports the Acunetix Scanner with xml output or Acunetix 360 Scanner with JSON output. ### Sample Scan Data Sample Acunetix Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/acunetix). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/acunetix360.md b/docs/content/en/integrations/parsers/file/acunetix360.md deleted file mode 100644 index 01b208bbeaa..00000000000 --- a/docs/content/en/integrations/parsers/file/acunetix360.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: "Acunetix 360 Scanner" -toc_hide: true ---- -Vulnerabilities List - JSON report - -### Sample Scan Data -Sample Acunetix 360 Scanner scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/acunetix360). 
\ No newline at end of file diff --git a/dojo/db_migrations/0208_merge_acunetix.py b/dojo/db_migrations/0208_merge_acunetix.py new file mode 100644 index 00000000000..601f4027144 --- /dev/null +++ b/dojo/db_migrations/0208_merge_acunetix.py @@ -0,0 +1,55 @@ +from django.db import migrations +import logging + + +logger = logging.getLogger(__name__) + + +PARSER_REFERENCES = ['Acunetix360 Scan'] + + +def update_parser_test(test, parser_test_type) -> None: + if test.test_type.name in PARSER_REFERENCES or test.scan_type in PARSER_REFERENCES: + test.test_type = parser_test_type + test.scan_type = parser_test_type.name + test.save() + + +# Update the found_by field to remove Acunetix360 and add Acunetix +def update_parser_finding(finding, newparser_test_type, parser_test_type) -> None: + # Check if Acunetix360 Scan is in found by list and remove + if parser_test_type in finding.found_by.all(): + finding.found_by.remove(parser_test_type.id) + # Check if Acunetix Scan is already in list somehow before adding it + if newparser_test_type not in finding.found_by.all(): + finding.found_by.add(newparser_test_type.id) + finding.save() + + +# Update all finding objects that came from Acunetix360 reports +def forward_merge_parser(apps, schema_editor): + finding_model = apps.get_model('dojo', 'Finding') + test_type_model = apps.get_model('dojo', 'Test_Type') + # Get or create Acunetix Scan Test Type and fetch the Acunetix360 Scan test types + newparser_test_type, _ = test_type_model.objects.get_or_create(name="Acunetix Scan", defaults={"active": True}) + parser_test_type = test_type_model.objects.filter(name="Acunetix360 Scan").first() + # Get all the findings found by Acunetix360 Scan + findings = finding_model.objects.filter(test__scan_type__in=PARSER_REFERENCES) + logger.warning(f'We identified {findings.count()} Acunetix360 Scan findings to migrate to Acunetix Scan findings') + # Iterate over all findings and change + for finding in findings: + # Update the found by field + 
update_parser_finding(finding, newparser_test_type, parser_test_type) + # Update the test object + update_parser_test(finding.test, newparser_test_type) + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0207_alter_sonarqube_issue_key'), + ] + + operations = [ + migrations.RunPython(forward_merge_parser), + ] diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 416b200ed63..c727ef3521e 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1201,7 +1201,6 @@ def saml2_attrib_map_format(dict): 'Symfony Security Check': ['title', 'vulnerability_ids'], 'DSOP Scan': ['vulnerability_ids'], 'Acunetix Scan': ['title', 'description'], - 'Acunetix360 Scan': ['title', 'description'], 'Terrascan Scan': ['vuln_id_from_tool', 'title', 'severity', 'file_path', 'line', 'component_name'], 'Trivy Operator Scan': ['title', 'severity', 'vulnerability_ids'], 'Trivy Scan': ['title', 'severity', 'vulnerability_ids', 'cwe', 'description'], @@ -1289,7 +1288,6 @@ def saml2_attrib_map_format(dict): 'Qualys Scan': True, 'DSOP Scan': True, 'Acunetix Scan': True, - 'Acunetix360 Scan': True, 'Trivy Operator Scan': True, 'Trivy Scan': True, 'SpotBugs Scan': False, @@ -1389,7 +1387,6 @@ def saml2_attrib_map_format(dict): 'Qualys Scan': DEDUPE_ALGO_HASH_CODE, 'PHP Symfony Security Check': DEDUPE_ALGO_HASH_CODE, 'Acunetix Scan': DEDUPE_ALGO_HASH_CODE, - 'Acunetix360 Scan': DEDUPE_ALGO_HASH_CODE, 'Clair Scan': DEDUPE_ALGO_HASH_CODE, # 'Qualys Webapp Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, # Must also uncomment qualys webapp line in hashcode fields per scanner 'Veracode Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, diff --git a/dojo/tools/acunetix360/parser.py b/dojo/tools/acunetix/parse_acunetix360_json.py similarity index 91% rename from dojo/tools/acunetix360/parser.py rename to dojo/tools/acunetix/parse_acunetix360_json.py index 2639e4567f0..f9fff0b109c 100644 --- a/dojo/tools/acunetix360/parser.py +++ 
b/dojo/tools/acunetix/parse_acunetix360_json.py @@ -1,28 +1,19 @@ import json +from dateutil import parser import html2text - from cvss import parser as cvss_parser -from dateutil import parser -from dojo.models import Finding, Endpoint - +from dojo.models import Endpoint, Finding -class Acunetix360Parser(object): - def get_scan_types(self): - return ["Acunetix360 Scan"] - - def get_label_for_scan_types(self, scan_type): - return "Acunetix360 Scan" - - def get_description_for_scan_types(self, scan_type): - return "Acunetix360 JSON format." +class AcunetixJSONParser(object): + """This parser is written for Acunetix JSON Findings.""" def get_findings(self, filename, test): + dupes = dict() data = json.load(filename) dupes = dict() scan_date = parser.parse(data["Generated"]) text_maker = html2text.HTML2Text() text_maker.body_width = 0 - for item in data["Vulnerabilities"]: title = item["Name"] findingdetail = text_maker.handle(item.get("Description", "")) @@ -53,7 +44,6 @@ def get_findings(self, filename, test): response = item["HttpResponse"]["Content"] if response is None or len(response) <= 0: response = "Response Not Found" - finding = Finding( title=title, test=test, @@ -66,7 +56,6 @@ def get_findings(self, filename, test): cwe=cwe, static_finding=True, ) - if ( (item["Classification"] is not None) and (item["Classification"]["Cvss"] is not None) @@ -86,19 +75,15 @@ def get_findings(self, filename, test): elif "FalsePositive" in state: finding.false_p = True finding.active = False - finding.unsaved_req_resp = [{"req": request, "resp": response}] finding.unsaved_endpoints = [Endpoint.from_uri(url)] - if item.get("FirstSeenDate"): parseddate = parser.parse(item["FirstSeenDate"]) finding.date = parseddate - if dupe_key in dupes: find = dupes[dupe_key] find.unsaved_req_resp.extend(finding.unsaved_req_resp) find.unsaved_endpoints.extend(finding.unsaved_endpoints) else: dupes[dupe_key] = finding - return list(dupes.values()) diff --git 
a/dojo/tools/acunetix/parse_acunetix_xml.py b/dojo/tools/acunetix/parse_acunetix_xml.py new file mode 100644 index 00000000000..12ca4100a03 --- /dev/null +++ b/dojo/tools/acunetix/parse_acunetix_xml.py @@ -0,0 +1,176 @@ +import hashlib +import dateutil +import html2text +import logging +import hyperlink +from cvss import parser as cvss_parser +from defusedxml.ElementTree import parse +from dojo.models import Endpoint, Finding +logger = logging.getLogger(__name__) + + +class AcunetixXMLParser(object): + """This parser is written for Acunetix XML reports""" + def get_findings(self, filename, test): + dupes = dict() + root = parse(filename).getroot() + for scan in root.findall("Scan"): + start_url = scan.findtext("StartURL") + if ":" not in start_url: + start_url = "//" + start_url + # get report date + if scan.findtext("StartTime") and "" != scan.findtext("StartTime"): + report_date = dateutil.parser.parse( + scan.findtext("StartTime") + ).date() + for item in scan.findall("ReportItems/ReportItem"): + finding = Finding( + test=test, + title=item.findtext("Name"), + severity=self.get_severity(item.findtext("Severity")), + description=html2text.html2text( + item.findtext("Description") + ).strip(), + false_p=self.get_false_positive( + item.findtext("IsFalsePositive") + ), + static_finding=True, + dynamic_finding=False, + nb_occurences=1, + ) + if item.findtext("Impact") and "" != item.findtext("Impact"): + finding.impact = item.findtext("Impact") + if item.findtext("Recommendation") and "" != item.findtext( + "Recommendation" + ): + finding.mitigation = item.findtext("Recommendation") + if report_date: + finding.date = report_date + if item.findtext("CWEList/CWE"): + finding.cwe = self.get_cwe_number( + item.findtext("CWEList/CWE") + ) + references = [] + for reference in item.findall("References/Reference"): + url = reference.findtext("URL") + db = reference.findtext("Database") or url + references.append(" * [{}]({})".format(db, url)) + if len(references) > 0: + 
finding.references = "\n".join(references) + if item.findtext("CVSS3/Descriptor"): + cvss_objects = cvss_parser.parse_cvss_from_text( + item.findtext("CVSS3/Descriptor") + ) + if len(cvss_objects) > 0: + finding.cvssv3 = cvss_objects[0].clean_vector() + # more description are in "Details" + if ( + item.findtext("Details") + and len(item.findtext("Details").strip()) > 0 + ): + finding.description += "\n\n**Details:**\n{}".format( + html2text.html2text(item.findtext("Details")) + ) + if ( + item.findtext("TechnicalDetails") + and len(item.findtext("TechnicalDetails").strip()) > 0 + ): + finding.description += ( + "\n\n**TechnicalDetails:**\n\n{}".format( + item.findtext("TechnicalDetails") + ) + ) + # add requests + finding.unsaved_req_resp = list() + if len(item.findall("TechnicalDetails/Request")): + finding.dynamic_finding = ( + True # if there is some requests it's dynamic + ) + finding.static_finding = ( + False # if there is some requests it's dynamic + ) + for request in item.findall("TechnicalDetails/Request"): + finding.unsaved_req_resp.append( + {"req": (request.text or ""), "resp": ""} + ) + # manage the endpoint + url = hyperlink.parse(start_url) + endpoint = Endpoint( + host=url.host, + port=url.port, + path=item.findtext("Affects"), + ) + if url.scheme is not None and "" != url.scheme: + endpoint.protocol = url.scheme + finding.unsaved_endpoints = [endpoint] + dupe_key = hashlib.sha256( + "|".join( + [ + finding.title, + str(finding.impact), + str(finding.mitigation), + ] + ).encode("utf-8") + ).hexdigest() + if dupe_key in dupes: + find = dupes[dupe_key] + # add details for the duplicate finding + if ( + item.findtext("Details") + and len(item.findtext("Details").strip()) > 0 + ): + find.description += ( + "\n-----\n\n**Details:**\n{}".format( + html2text.html2text(item.findtext("Details")) + ) + ) + find.unsaved_endpoints.extend(finding.unsaved_endpoints) + find.unsaved_req_resp.extend(finding.unsaved_req_resp) + find.nb_occurences += 
finding.nb_occurences + logger.debug( + "Duplicate finding : {defectdojo_title}".format( + defectdojo_title=finding.title + ) + ) + else: + dupes[dupe_key] = finding + return list(dupes.values()) + + def get_cwe_number(self, cwe): + """ + Returns cwe number. + :param cwe: + :return: cwe number + """ + if cwe is None: + return None + else: + return int(cwe.split("-")[1]) + + def get_severity(self, severity): + """ + Returns Severity as per DefectDojo standards. + :param severity: + :return: + """ + if severity == "high": + return "High" + elif severity == "medium": + return "Medium" + elif severity == "low": + return "Low" + elif severity == "informational": + return "Info" + else: + return "Critical" + + def get_false_positive(self, false_p): + """ + Returns True, False for false positive as per DefectDojo standards. + :param false_p: + :return: + """ + if false_p: + return True + else: + return False diff --git a/dojo/tools/acunetix/parser.py b/dojo/tools/acunetix/parser.py index 3227d20e188..9d0ee771230 100644 --- a/dojo/tools/acunetix/parser.py +++ b/dojo/tools/acunetix/parser.py @@ -1,18 +1,9 @@ -import hashlib -import logging - -import dateutil -import html2text -import hyperlink -from cvss import parser as cvss_parser -from defusedxml.ElementTree import parse -from dojo.models import Endpoint, Finding - -logger = logging.getLogger(__name__) +from dojo.tools.acunetix.parse_acunetix360_json import AcunetixJSONParser +from dojo.tools.acunetix.parse_acunetix_xml import AcunetixXMLParser class AcunetixParser(object): - """Parser for Acunetix XML files.""" + """Parser for Acunetix XML files and Acunetix 360 JSON files.""" def get_scan_types(self): return ["Acunetix Scan"] @@ -21,182 +12,10 @@ def get_label_for_scan_types(self, scan_type): return "Acunetix Scanner" def get_description_for_scan_types(self, scan_type): - return "XML format" - - def get_findings(self, xml_output, test): - root = parse(xml_output).getroot() - - dupes = dict() - for scan in 
root.findall("Scan"): - start_url = scan.findtext("StartURL") - if ":" not in start_url: - start_url = "//" + start_url - # get report date - if scan.findtext("StartTime") and "" != scan.findtext("StartTime"): - report_date = dateutil.parser.parse( - scan.findtext("StartTime") - ).date() - - for item in scan.findall("ReportItems/ReportItem"): - finding = Finding( - test=test, - title=item.findtext("Name"), - severity=self.get_severity(item.findtext("Severity")), - description=html2text.html2text( - item.findtext("Description") - ).strip(), - false_p=self.get_false_positive( - item.findtext("IsFalsePositive") - ), - static_finding=True, - dynamic_finding=False, - nb_occurences=1, - ) - - if item.findtext("Impact") and "" != item.findtext("Impact"): - finding.impact = item.findtext("Impact") - - if item.findtext("Recommendation") and "" != item.findtext( - "Recommendation" - ): - finding.mitigation = item.findtext("Recommendation") - - if report_date: - finding.date = report_date - - if item.findtext("CWEList/CWE"): - finding.cwe = self.get_cwe_number( - item.findtext("CWEList/CWE") - ) - - references = [] - for reference in item.findall("References/Reference"): - url = reference.findtext("URL") - db = reference.findtext("Database") or url - references.append(" * [{}]({})".format(db, url)) - if len(references) > 0: - finding.references = "\n".join(references) - - if item.findtext("CVSS3/Descriptor"): - cvss_objects = cvss_parser.parse_cvss_from_text( - item.findtext("CVSS3/Descriptor") - ) - if len(cvss_objects) > 0: - finding.cvssv3 = cvss_objects[0].clean_vector() - - # more description are in "Details" - if ( - item.findtext("Details") - and len(item.findtext("Details").strip()) > 0 - ): - finding.description += "\n\n**Details:**\n{}".format( - html2text.html2text(item.findtext("Details")) - ) - if ( - item.findtext("TechnicalDetails") - and len(item.findtext("TechnicalDetails").strip()) > 0 - ): - finding.description += ( - 
"\n\n**TechnicalDetails:**\n\n{}".format( - item.findtext("TechnicalDetails") - ) - ) - - # add requests - finding.unsaved_req_resp = list() - if len(item.findall("TechnicalDetails/Request")): - finding.dynamic_finding = ( - True # if there is some requests it's dynamic - ) - finding.static_finding = ( - False # if there is some requests it's dynamic - ) - for request in item.findall("TechnicalDetails/Request"): - finding.unsaved_req_resp.append( - {"req": (request.text or ""), "resp": ""} - ) - - # manage the endpoint - url = hyperlink.parse(start_url) - endpoint = Endpoint( - host=url.host, - port=url.port, - path=item.findtext("Affects"), - ) - if url.scheme is not None and "" != url.scheme: - endpoint.protocol = url.scheme - finding.unsaved_endpoints = [endpoint] - - dupe_key = hashlib.sha256( - "|".join( - [ - finding.title, - str(finding.impact), - str(finding.mitigation), - ] - ).encode("utf-8") - ).hexdigest() - - if dupe_key in dupes: - find = dupes[dupe_key] - # add details for the duplicate finding - if ( - item.findtext("Details") - and len(item.findtext("Details").strip()) > 0 - ): - find.description += ( - "\n-----\n\n**Details:**\n{}".format( - html2text.html2text(item.findtext("Details")) - ) - ) - find.unsaved_endpoints.extend(finding.unsaved_endpoints) - find.unsaved_req_resp.extend(finding.unsaved_req_resp) - find.nb_occurences += finding.nb_occurences - logger.debug( - "Duplicate finding : {defectdojo_title}".format( - defectdojo_title=finding.title - ) - ) - else: - dupes[dupe_key] = finding - - return list(dupes.values()) - - def get_cwe_number(self, cwe): - """ - Returns cwe number. - :param cwe: - :return: cwe number - """ - if cwe is None: - return None - else: - return int(cwe.split("-")[1]) - - def get_severity(self, severity): - """ - Returns Severity as per DefectDojo standards. 
- :param severity: - :return: - """ - if severity == "high": - return "High" - elif severity == "medium": - return "Medium" - elif severity == "low": - return "Low" - elif severity == "informational": - return "Info" - else: - return "Critical" + return "Acunetix Scanner in XML format or Acunetix 360 Scanner in JSON format" - def get_false_positive(self, false_p): - """ - Returns True, False for false positive as per DefectDojo standards. - :param false_p: - :return: - """ - if false_p: - return True - else: - return False + def get_findings(self, filename, test): + if '.xml' in str(filename): + return AcunetixXMLParser().get_findings(filename, test) + elif '.json' in str(filename): + return AcunetixJSONParser().get_findings(filename, test) diff --git a/dojo/tools/acunetix360/__init__.py b/dojo/tools/acunetix360/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/unittests/scans/acunetix360/acunetix360_many_findings.json b/unittests/scans/acunetix/acunetix360_many_findings.json similarity index 100% rename from unittests/scans/acunetix360/acunetix360_many_findings.json rename to unittests/scans/acunetix/acunetix360_many_findings.json diff --git a/unittests/scans/acunetix360/acunetix360_multiple_cwe.json b/unittests/scans/acunetix/acunetix360_multiple_cwe.json similarity index 100% rename from unittests/scans/acunetix360/acunetix360_multiple_cwe.json rename to unittests/scans/acunetix/acunetix360_multiple_cwe.json diff --git a/unittests/scans/acunetix360/acunetix360_one_finding.json b/unittests/scans/acunetix/acunetix360_one_finding.json similarity index 100% rename from unittests/scans/acunetix360/acunetix360_one_finding.json rename to unittests/scans/acunetix/acunetix360_one_finding.json diff --git a/unittests/scans/acunetix360/acunetix360_one_finding_accepted_risk.json b/unittests/scans/acunetix/acunetix360_one_finding_accepted_risk.json similarity index 100% rename from 
unittests/scans/acunetix360/acunetix360_one_finding_accepted_risk.json rename to unittests/scans/acunetix/acunetix360_one_finding_accepted_risk.json diff --git a/unittests/scans/acunetix360/acunetix360_one_finding_false_positive.json b/unittests/scans/acunetix/acunetix360_one_finding_false_positive.json similarity index 100% rename from unittests/scans/acunetix360/acunetix360_one_finding_false_positive.json rename to unittests/scans/acunetix/acunetix360_one_finding_false_positive.json diff --git a/unittests/scans/acunetix360/acunetix360_zero_finding.json b/unittests/scans/acunetix/acunetix360_zero_finding.json similarity index 100% rename from unittests/scans/acunetix360/acunetix360_zero_finding.json rename to unittests/scans/acunetix/acunetix360_zero_finding.json diff --git a/unittests/tools/test_acunetix360_parser.py b/unittests/tools/test_acunetix360_parser.py deleted file mode 100644 index d491a1de2b1..00000000000 --- a/unittests/tools/test_acunetix360_parser.py +++ /dev/null @@ -1,128 +0,0 @@ -from ..dojo_test_case import DojoTestCase -from dojo.models import Test -from dojo.tools.acunetix360.parser import Acunetix360Parser -from datetime import datetime - - -class TestAcunetix360Parser(DojoTestCase): - - def test_parse_file_with_one_finding(self): - testfile = open("unittests/scans/acunetix360/acunetix360_one_finding.json") - parser = Acunetix360Parser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(1, len(findings)) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("Medium", finding.severity) - self.assertEqual(16, finding.cwe) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) - self.assertEqual(1, len(finding.unsaved_endpoints)) - endpoint = finding.unsaved_endpoints[0] - 
self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php") - self.assertEqual(finding.date, datetime(2021, 6, 16, 12, 30)) - self.assertIn("https://online.acunetix360.com/issues/detail/735f4503-e9eb-4b4c-4306-ad49020a4c4b", finding.references) - - def test_parse_file_with_one_finding_false_positive(self): - testfile = open("unittests/scans/acunetix360/acunetix360_one_finding_false_positive.json") - parser = Acunetix360Parser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(1, len(findings)) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("Medium", finding.severity) - self.assertEqual(16, finding.cwe) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) - self.assertEqual(1, len(finding.unsaved_endpoints)) - endpoint = finding.unsaved_endpoints[0] - self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php") - self.assertTrue(finding.false_p) - - def test_parse_file_with_one_finding_risk_accepted(self): - testfile = open("unittests/scans/acunetix360/acunetix360_one_finding_accepted_risk.json") - parser = Acunetix360Parser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(1, len(findings)) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("Medium", finding.severity) - self.assertEqual(16, finding.cwe) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) - self.assertEqual(1, len(finding.unsaved_endpoints)) - endpoint = finding.unsaved_endpoints[0] - self.assertEqual(str(endpoint), 
"http://php.testsparker.com/auth/login.php") - self.assertTrue(finding.risk_accepted) - - def test_parse_file_with_multiple_finding(self): - testfile = open("unittests/scans/acunetix360/acunetix360_many_findings.json") - parser = Acunetix360Parser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(16, len(findings)) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("Medium", finding.severity) - self.assertEqual(16, finding.cwe) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) - self.assertEqual(1, len(finding.unsaved_endpoints)) - endpoint = finding.unsaved_endpoints[0] - self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php") - - with self.subTest(i=1): - finding = findings[1] - self.assertEqual("Critical", finding.severity) - self.assertEqual(89, finding.cwe) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", finding.cvssv3) - self.assertEqual(1, len(finding.unsaved_endpoints)) - endpoint = finding.unsaved_endpoints[0] - self.assertEqual(str(endpoint), "http://php.testsparker.com/artist.php?id=-1%20OR%2017-7=10") - - with self.subTest(i=2): - finding = findings[2] - self.assertEqual("Medium", finding.severity) - self.assertEqual(205, finding.cwe) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:N/S:U/C:N/I:L/A:N/E:H/RL:O/RC:C", finding.cvssv3) - self.assertEqual(1, len(finding.unsaved_endpoints)) - endpoint = finding.unsaved_endpoints[0] - self.assertEqual(str(endpoint), "http://php.testsparker.com") - - def test_parse_file_with_mulitple_cwe(self): - testfile = 
open("unittests/scans/acunetix360/acunetix360_multiple_cwe.json") - parser = Acunetix360Parser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(1, len(findings)) - for finding in findings: - for endpoint in finding.unsaved_endpoints: - endpoint.clean() - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("Medium", finding.severity) - self.assertEqual(16, finding.cwe) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) - self.assertEqual(1, len(finding.unsaved_endpoints)) - endpoint = finding.unsaved_endpoints[0] - self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php") diff --git a/unittests/tools/test_acunetix_parser.py b/unittests/tools/test_acunetix_parser.py index 0ee5be5dc35..eedf73b159f 100644 --- a/unittests/tools/test_acunetix_parser.py +++ b/unittests/tools/test_acunetix_parser.py @@ -1,8 +1,8 @@ import datetime - from ..dojo_test_case import DojoTestCase from dojo.models import Test from dojo.tools.acunetix.parser import AcunetixParser +from datetime import datetime as date class TestAcunetixParser(DojoTestCase): @@ -201,3 +201,124 @@ def test_parse_file_with_example_com(self): self.assertIn('resp', req_resp) self.assertIsNotNone(req_resp['resp']) self.assertIsInstance(req_resp['resp'], str) + + def test_parse_file_with_one_finding_acunetix360(self): + testfile = open("unittests/scans/acunetix/acunetix360_one_finding.json") + parser = AcunetixParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(1, len(findings)) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("Medium", finding.severity) + self.assertEqual(16, finding.cwe) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + 
self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) + self.assertEqual(1, len(finding.unsaved_endpoints)) + endpoint = finding.unsaved_endpoints[0] + self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php") + self.assertEqual(finding.date, date(2021, 6, 16, 12, 30)) + self.assertIn("https://online.acunetix360.com/issues/detail/735f4503-e9eb-4b4c-4306-ad49020a4c4b", finding.references) + + def test_parse_file_with_one_finding_false_positive(self): + testfile = open("unittests/scans/acunetix/acunetix360_one_finding_false_positive.json") + parser = AcunetixParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(1, len(findings)) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("Medium", finding.severity) + self.assertEqual(16, finding.cwe) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) + self.assertEqual(1, len(finding.unsaved_endpoints)) + endpoint = finding.unsaved_endpoints[0] + self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php") + self.assertTrue(finding.false_p) + + def test_parse_file_with_one_finding_risk_accepted(self): + testfile = open("unittests/scans/acunetix/acunetix360_one_finding_accepted_risk.json") + parser = AcunetixParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(1, len(findings)) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("Medium", finding.severity) + self.assertEqual(16, finding.cwe) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + 
self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) + self.assertEqual(1, len(finding.unsaved_endpoints)) + endpoint = finding.unsaved_endpoints[0] + self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php") + self.assertTrue(finding.risk_accepted) + + def test_parse_file_with_multiple_finding_acunetix360(self): + testfile = open("unittests/scans/acunetix/acunetix360_many_findings.json") + parser = AcunetixParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(16, len(findings)) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("Medium", finding.severity) + self.assertEqual(16, finding.cwe) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) + self.assertEqual(1, len(finding.unsaved_endpoints)) + endpoint = finding.unsaved_endpoints[0] + self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php") + + with self.subTest(i=1): + finding = findings[1] + self.assertEqual("Critical", finding.severity) + self.assertEqual(89, finding.cwe) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", finding.cvssv3) + self.assertEqual(1, len(finding.unsaved_endpoints)) + endpoint = finding.unsaved_endpoints[0] + self.assertEqual(str(endpoint), "http://php.testsparker.com/artist.php?id=-1%20OR%2017-7=10") + + with self.subTest(i=2): + finding = findings[2] + self.assertEqual("Medium", finding.severity) + self.assertEqual(205, finding.cwe) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:N/S:U/C:N/I:L/A:N/E:H/RL:O/RC:C", finding.cvssv3) + 
self.assertEqual(1, len(finding.unsaved_endpoints)) + endpoint = finding.unsaved_endpoints[0] + self.assertEqual(str(endpoint), "http://php.testsparker.com") + + def test_parse_file_with_mulitple_cwe(self): + testfile = open("unittests/scans/acunetix/acunetix360_multiple_cwe.json") + parser = AcunetixParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(1, len(findings)) + for finding in findings: + for endpoint in finding.unsaved_endpoints: + endpoint.clean() + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("Medium", finding.severity) + self.assertEqual(16, finding.cwe) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N/E:H/RL:O/RC:C", finding.cvssv3) + self.assertEqual(1, len(finding.unsaved_endpoints)) + endpoint = finding.unsaved_endpoints[0] + self.assertEqual(str(endpoint), "http://php.testsparker.com/auth/login.php")