Merge pull request #8876 from DefectDojo/master-into-dev/2.27.3-2.28.0-dev

Release: Merge back 2.27.3 into dev from: master-into-dev/2.27.3-2.28.0-dev
Maffooch authored Oct 23, 2023
2 parents d674845 + d9fe213 commit eeed7f9
Showing 9 changed files with 38 additions and 30 deletions.
2 changes: 1 addition & 1 deletion dojo/endpoint/views.py
@@ -228,7 +228,7 @@ def delete_endpoint(request, eid):
title='Deletion of %s' % endpoint,
product=product,
description='The endpoint "%s" was deleted by %s' % (endpoint, request.user),
- url=request.build_absolute_uri(reverse('endpoint')),
+ url=reverse('endpoint'),
icon="exclamation-triangle")
return HttpResponseRedirect(reverse('view_product', args=(product.id,)))

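Note on this hunk (the same kind of change appears in dojo/finding/views.py and dojo/test/views.py below): Django's reverse() returns a site-relative path, while request.build_absolute_uri() prefixes that path with the scheme and host of the incoming request. A minimal sketch of the difference, with an illustrative hostname and function name that are not part of this commit:

    from django.urls import reverse

    def notification_link(request, finding_id):
        relative = reverse("view_finding", args=(finding_id,))  # e.g. "/finding/123"
        absolute = request.build_absolute_uri(relative)         # e.g. "https://dojo.example.com/finding/123"
        # After this commit the notification call receives the relative form.
        return relative
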
2 changes: 1 addition & 1 deletion dojo/filters.py
@@ -2041,7 +2041,7 @@ class Meta:
'target_end', 'notes', 'percent_complete',
'actual_time', 'engagement', 'version',
'branch_tag', 'build_id', 'commit_hash',
- 'api_scan_configuration']
+ 'api_scan_configuration', 'scan_type']


class ApiAppAnalysisFilter(DojoFilter):
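The filters.py change adds 'scan_type' to a test filter's Meta.fields. In django-filter, each name listed there becomes an exact-match query parameter on the corresponding API endpoint. A minimal sketch, assuming Test.scan_type is a plain model field; the class name and import path here are illustrative, not DefectDojo's exact ones:

    import django_filters
    from dojo.models import Test  # assumed import path

    class ApiTestFilterSketch(django_filters.FilterSet):
        class Meta:
            model = Test
            # Each field listed here is exposed for filtering,
            # so API clients can now query with ?scan_type=...
            fields = ['engagement', 'scan_type']
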
2 changes: 1 addition & 1 deletion dojo/finding/views.py
@@ -1445,7 +1445,7 @@ def reopen_finding(request, fid):
finding=finding,
description='The finding "%s" was reopened by %s'
% (finding.title, request.user),
- url=request.build_absolute_uri(reverse("view_test", args=(finding.test.id,))),
+ url=reverse("view_finding", args=(finding.id,)),
)
return HttpResponseRedirect(reverse("view_finding", args=(finding.id,)))

12 changes: 8 additions & 4 deletions dojo/reports/views.py
@@ -859,7 +859,8 @@ def csv_export(request):
continue
fields.append(key)
except Exception as exc:
- logger.debug('Error in attribute: ' + str(exc))
+ logger.error('Error in attribute: ' + str(exc))
+ fields.append(key)
continue
fields.append('test')
fields.append('found_by')
@@ -891,7 +892,8 @@ def csv_export(request):
value = value.replace('\n', ' NEWLINE ').replace('\r', '')
fields.append(value)
except Exception as exc:
- logger.debug('Error in attribute: ' + str(exc))
+ logger.error('Error in attribute: ' + str(exc))
+ fields.append("Value not supported")
continue
fields.append(finding.test.title)
fields.append(finding.test.test_type.name)
@@ -955,7 +957,8 @@ def excel_export(request):
cell.font = font_bold
col_num += 1
except Exception as exc:
- logger.debug('Error in attribute: ' + str(exc))
+ logger.error('Error in attribute: ' + str(exc))
+ cell = worksheet.cell(row=row_num, column=col_num, value=key)
continue
cell = worksheet.cell(row=row_num, column=col_num, value='found_by')
cell.font = font_bold
@@ -998,7 +1001,8 @@ def excel_export(request):
worksheet.cell(row=row_num, column=col_num, value=value)
col_num += 1
except Exception as exc:
- logger.debug('Error in attribute: ' + str(exc))
+ logger.error('Error in attribute: ' + str(exc))
+ worksheet.cell(row=row_num, column=col_num, value="Value not supported")
continue
worksheet.cell(row=row_num, column=col_num, value=finding.test.test_type.name)
col_num += 1
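All four reports/views.py hunks follow the same pattern: raise the log level from debug to error and, importantly, still emit a placeholder value so the exported CSV/Excel row keeps the same number of columns as the header even when reading an attribute fails. A standalone sketch of that pattern, with illustrative names that are not part of this commit:

    import logging

    logger = logging.getLogger(__name__)

    def write_finding_rows(writer, findings, attributes):
        # 'writer' is any object with a writerow() method, e.g. csv.writer(...).
        for finding in findings:
            row = []
            for attr in attributes:
                try:
                    row.append(str(getattr(finding, attr)))
                except Exception as exc:
                    logger.error('Error in attribute: ' + str(exc))
                    row.append("Value not supported")  # placeholder keeps columns aligned
            writer.writerow(row)
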
2 changes: 1 addition & 1 deletion dojo/test/views.py
@@ -599,7 +599,7 @@ def process_forms(self, request: HttpRequest, test: Test, context: dict):
description=_('Finding "%(title)s" was added by %(user)s') % {
'title': finding.title, 'user': request.user
},
- url=request.build_absolute_uri(reverse('view_finding', args=(finding.id,))),
+ url=reverse("view_finding", args=(finding.id,)),
icon="exclamation-triangle")
# Add a success message
messages.add_message(
38 changes: 21 additions & 17 deletions dojo/tools/qualys/csv_parser.py
@@ -112,6 +112,8 @@ def build_findings_from_dict(report_findings: [dict]) -> [Finding]:
for report_finding in report_findings:
if report_finding.get("FQDN"):
endpoint = Endpoint.from_uri(report_finding.get("FQDN"))
+ elif report_finding.get("DNS"):
+     endpoint = Endpoint(host=report_finding.get("DNS"))
else:
endpoint = Endpoint(host=report_finding["IP"])

@@ -123,19 +125,22 @@ def build_findings_from_dict(report_findings: [dict]) -> [Finding]:
cvssv3 = _extract_cvss_vectors(
report_finding["CVSS3.1 Base"], report_finding["CVSS3.1 Temporal"]
)

- finding = Finding(
-     title=f"QID-{report_finding['QID']} | {report_finding['Title']}",
-     mitigation=report_finding["Solution"],
-     description=f"{report_finding['Threat']}\nResult Evidence: \n{report_finding.get('Threat', 'Not available')}",
-     severity=severity_lookup.get(report_finding["Severity"], "Info"),
-     impact=report_finding["Impact"],
-     date=parser.parse(
-         report_finding["Last Detected"].replace("Z", "")
-     ),
-     vuln_id_from_tool=report_finding["QID"],
-     cvssv3=cvssv3
- )
+ finding_with_id = next((obj for obj in dojo_findings if obj.vuln_id_from_tool == report_finding["QID"]), None)
+ if finding_with_id:
+     finding = finding_with_id
+ else:
+     finding = Finding(
+         title=f"QID-{report_finding['QID']} | {report_finding['Title']}",
+         mitigation=report_finding["Solution"],
+         description=f"{report_finding['Threat']}\nResult Evidence: \n{report_finding.get('Threat', 'Not available')}",
+         severity=severity_lookup.get(report_finding["Severity"], "Info"),
+         impact=report_finding["Impact"],
+         date=parser.parse(
+             report_finding["Last Detected"].replace("Z", "")
+         ),
+         vuln_id_from_tool=report_finding["QID"],
+         cvssv3=cvssv3
+     )

cve_data = report_finding.get("CVE ID")
finding.unsaved_vulnerability_ids = (
@@ -163,8 +168,7 @@ def build_findings_from_dict(report_findings: [dict]) -> [Finding]:
finding.is_mitigated = False

finding.verified = True
- finding.unsaved_endpoints = [endpoint]
-
- dojo_findings.append(finding)
-
+ finding.unsaved_endpoints.append(endpoint)
+ if not finding_with_id:
+     dojo_findings.append(finding)
return dojo_findings
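
Taken together, the csv_parser.py hunks change the parser from emitting one Finding per report row to de-duplicating rows by QID and accumulating each row's endpoint onto the existing finding. A rough standalone sketch of that logic, using plain dicts instead of DefectDojo's Finding/Endpoint objects:

    def merge_report_rows(rows):
        findings = []
        for row in rows:
            # Look for a finding already created for this QID.
            existing = next((f for f in findings if f["vuln_id_from_tool"] == row["QID"]), None)
            finding = existing or {"vuln_id_from_tool": row["QID"], "endpoints": []}
            # Every row contributes an endpoint, whether or not the finding is new.
            finding["endpoints"].append(row.get("DNS") or row["IP"])
            if existing is None:
                findings.append(finding)
        return findings

For example, two rows sharing a QID but listing different hosts now yield a single finding with two unsaved endpoints rather than two separate findings.
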
2 changes: 1 addition & 1 deletion helm/defectdojo/Chart.yaml
@@ -2,7 +2,7 @@ apiVersion: v2
appVersion: "2.28.0-dev"
description: A Helm chart for Kubernetes to install DefectDojo
name: defectdojo
- version: 1.6.92-dev
+ version: 1.6.93-dev
icon: https://www.defectdojo.org/img/favicon.ico
maintainers:
- name: madchap
2 changes: 1 addition & 1 deletion requirements.txt
@@ -45,7 +45,7 @@ redis==5.0.1
requests==2.31.0
sqlalchemy==2.0.22 # Required by Celery broker transport
supervisor==4.2.5
- urllib3==1.26.17
+ urllib3==1.26.18
uWSGI==2.0.22
vobject==0.9.6.1
whitenoise==5.2.0
6 changes: 3 additions & 3 deletions unittests/tools/test_qualys_parser.py
@@ -69,7 +69,7 @@ def test_parse_file_with_multiple_vuln_has_multiple_findings_csv(self):
for finding in findings:
for endpoint in finding.unsaved_endpoints:
endpoint.clean()
- self.assertEqual(6, len(findings))
+ self.assertEqual(3, len(findings))

finding = findings[0]
self.assertEqual(
@@ -79,11 +79,11 @@ def test_parse_file_with_multiple_vuln_has_multiple_findings_csv(self):
finding.severity, "Critical"
)
self.assertEqual(
- finding.unsaved_endpoints[0].host, "10.98.57.180"
+ finding.unsaved_endpoints[0].host, "ip-10-98-57-180.eu-west-1.compute.internal"
)

for finding in findings:
- if finding.unsaved_endpoints[0].host == "10.98.57.180" and finding.title == "QID-105971 | EOL/Obsolete Software: Microsoft ASP.NET 1.0 Detected":
+ if finding.unsaved_endpoints[0].host == "ip-10-98-57-180.eu-west-1.compute.internal" and finding.title == "QID-105971 | EOL/Obsolete Software: Microsoft ASP.NET 1.0 Detected":

self.assertEqual(
finding.severity, "Critical"
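
The test expectations change accordingly: the sample report that previously produced 6 findings now collapses to 3 once rows sharing a QID are merged, and the asserted host comes from the DNS hostname rather than the bare IP. A toy illustration of why the count drops (standalone data, not the real sample file; the second hostname is invented):

    rows = [
        {"QID": "105971", "DNS": "ip-10-98-57-180.eu-west-1.compute.internal"},
        {"QID": "105971", "DNS": "ip-10-98-57-181.eu-west-1.compute.internal"},
        {"QID": "38170", "DNS": "ip-10-98-57-180.eu-west-1.compute.internal"},
    ]
    assert len({row["QID"] for row in rows}) == 2  # three rows collapse into two findings
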
