Commit

Merge branch 'dev' into remove_psycopg2
manuel-sommer authored Nov 22, 2024
2 parents 94d4f79 + c93fc2c commit 673fe8d
Showing 41 changed files with 1,176 additions and 73 deletions.
2 changes: 1 addition & 1 deletion Dockerfile.integration-tests-debian
@@ -1,7 +1,7 @@

# code: language=Dockerfile

FROM openapitools/openapi-generator-cli:v7.9.0@sha256:bb32f5f0c9f5bdbb7b00959e8009de0230aedc200662701f05fc244c36f967ba AS openapitools
FROM openapitools/openapi-generator-cli:v7.10.0@sha256:f2054a5a7908ad81017d0f0839514ba5eab06ae628914ff71554d46fac1bcf7a AS openapitools
FROM python:3.11.9-slim-bookworm@sha256:8c1036ec919826052306dfb5286e4753ffd9d5f6c24fbc352a5399c3b405b57e AS build
WORKDIR /app
RUN \
2 changes: 1 addition & 1 deletion Dockerfile.nginx-alpine
@@ -140,7 +140,7 @@ COPY manage.py ./
COPY dojo/ ./dojo/
RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true

FROM nginx:1.27.2-alpine@sha256:2140dad235c130ac861018a4e13a6bc8aea3a35f3a40e20c1b060d51a7efd250
FROM nginx:1.27.2-alpine@sha256:74175cf34632e88c6cfe206897cbfe2d2fecf9bf033c40e7f9775a3689e8adc7
ARG uid=1001
ARG appuser=defectdojo
COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/
2 changes: 1 addition & 1 deletion Dockerfile.nginx-debian
@@ -73,7 +73,7 @@ COPY dojo/ ./dojo/

RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true

FROM nginx:1.27.2-alpine@sha256:2140dad235c130ac861018a4e13a6bc8aea3a35f3a40e20c1b060d51a7efd250
FROM nginx:1.27.2-alpine@sha256:74175cf34632e88c6cfe206897cbfe2d2fecf9bf033c40e7f9775a3689e8adc7
ARG uid=1001
ARG appuser=defectdojo
COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/
2 changes: 2 additions & 0 deletions dojo/filters.py
@@ -2393,6 +2393,7 @@ class EndpointFilterHelper(FilterSet):
fields=(
("product", "product"),
("host", "host"),
("id", "id"),
),
)

@@ -2630,6 +2631,7 @@ class ApiEndpointFilter(DojoFilter):
fields=(
("host", "host"),
("product", "product"),
("id", "id"),
),
)

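The two hunks above add "id" to the orderable fields of both the UI endpoint filter helper and the API endpoint filter, so endpoint lists can be sorted by primary key. A minimal sketch of using that from a client follows; the /api/v2/endpoints/ route and the "o" ordering query parameter are assumptions based on DefectDojo's usual filter conventions and are not shown in this diff, and the URL and token are placeholders.

```python
# Hedged sketch: sort endpoints by id via the REST API (assumed "o" ordering parameter).
import requests

DOJO_URL = "https://defectdojo.example.com"        # hypothetical instance
headers = {"Authorization": "Token <api-token>"}   # placeholder API token

# Ascending by id; pass "-id" for descending order.
resp = requests.get(f"{DOJO_URL}/api/v2/endpoints/", headers=headers, params={"o": "id"})
resp.raise_for_status()
for endpoint in resp.json().get("results", []):
    print(endpoint["id"], endpoint.get("host"))
```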
6 changes: 3 additions & 3 deletions dojo/jira_link/views.py
@@ -100,10 +100,10 @@ def webhook(request, secret=None):
findings = None
# Determine what type of object we will be working with
if jissue.finding:
logging.debug(f"Received issue update for {jissue.jira_key} for finding {jissue.finding.id}")
logger.debug(f"Received issue update for {jissue.jira_key} for finding {jissue.finding.id}")
findings = [jissue.finding]
elif jissue.finding_group:
logging.debug(f"Received issue update for {jissue.jira_key} for finding group {jissue.finding_group}")
logger.debug(f"Received issue update for {jissue.jira_key} for finding group {jissue.finding_group}")
findings = jissue.finding_group.findings.all()
elif jissue.engagement:
return webhook_responser_handler("debug", "Update for engagement ignored")
@@ -228,7 +228,7 @@ def check_for_and_create_comment(parsed_json):
jissue = JIRA_Issue.objects.get(jira_id=jid)
except JIRA_Instance.DoesNotExist:
return webhook_responser_handler("info", f"JIRA issue {jid} is not linked to a DefectDojo Finding")
logging.debug(f"Received issue comment for {jissue.jira_key}")
logger.debug(f"Received issue comment for {jissue.jira_key}")
logger.debug("jissue: %s", vars(jissue))

jira_usernames = JIRA_Instance.objects.values_list("username", flat=True)
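These hunks, like the similar ones below in print_settings.py, models.py, notes/helper.py, notes/signals.py, and the SonarQube importer, replace calls on the root `logging` module with the module-level `logger`, so records carry the emitting module's name and respect its configured level and handlers. A minimal sketch of the pattern, using a hypothetical function:

```python
import logging

# Module-level logger, as used throughout DefectDojo; __name__ ties each record
# to this module so filtering and formatting can tell sources apart.
logger = logging.getLogger(__name__)

def process(item_id: int) -> None:
    # Emits through this module's logger rather than the root "logging" module.
    logger.debug("Processing item %s", item_id)
```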
2 changes: 1 addition & 1 deletion dojo/management/commands/print_settings.py
@@ -21,4 +21,4 @@ def handle(self, *args, **options):
value = getattr(settings, attr)
a_dict[attr] = value

logging.info(pformat(a_dict))
logger.info(pformat(a_dict))
6 changes: 3 additions & 3 deletions dojo/models.py
@@ -1786,7 +1786,7 @@ def clean(self):
action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
for remove_str in null_char_list:
self.path = self.path.replace(remove_str, "%00")
logging.error(f'Path "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
logger.error(f'Path "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
if self.path == "":
self.path = None

@@ -1799,7 +1799,7 @@ def clean(self):
action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
for remove_str in null_char_list:
self.query = self.query.replace(remove_str, "%00")
logging.error(f'Query "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
logger.error(f'Query "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
if self.query == "":
self.query = None

@@ -1812,7 +1812,7 @@ def clean(self):
action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
for remove_str in null_char_list:
self.fragment = self.fragment.replace(remove_str, "%00")
logging.error(f'Fragment "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
logger.error(f'Fragment "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
if self.fragment == "":
self.fragment = None

2 changes: 1 addition & 1 deletion dojo/notes/helper.py
@@ -7,5 +7,5 @@ def delete_related_notes(obj):
if not hasattr(obj, "notes"):
logger.warning(f"Attempted to delete notes from object type {type(obj)} without 'notes' attribute.")
return
logging.debug(f"Deleting {obj.notes.count()} notes for {type(obj).__name__} {obj.id}")
logger.debug(f"Deleting {obj.notes.count()} notes for {type(obj).__name__} {obj.id}")
obj.notes.all().delete()
2 changes: 1 addition & 1 deletion dojo/notes/signals.py
@@ -9,7 +9,7 @@


def delete_note_history(note):
logging.debug(f"Deleting history for note {note.id}")
logger.debug(f"Deleting history for note {note.id}")
note.history.all().delete()


2 changes: 1 addition & 1 deletion dojo/settings/.settings.dist.py.sha256sum
@@ -1 +1 @@
1ce38ebc213264f59377df3684c2f7aa8f3cf76657d8b500f4a7487b812b39fb
563b0e84cee202927fbaeb939ac52fbe6e6b2e3ea153e00eda7a10267bec60c2
2 changes: 1 addition & 1 deletion dojo/settings/settings.dist.py
@@ -1268,7 +1268,7 @@ def saml2_attrib_map_format(dict):
"NeuVector (compliance)": ["title", "vuln_id_from_tool", "description"],
"Wpscan": ["title", "description", "severity"],
"Popeye Scan": ["title", "description"],
"Nuclei Scan": ["title", "cwe", "severity"],
"Nuclei Scan": ["title", "cwe", "severity", "component_name"],
"KubeHunter Scan": ["title", "description"],
"kube-bench Scan": ["title", "vuln_id_from_tool", "description"],
"Threagile risks report": ["title", "cwe", "severity"],
File renamed without changes.
2 changes: 1 addition & 1 deletion dojo/templates/dojo/endpoints.html
@@ -109,7 +109,7 @@ <h3 class="has-filters">
{% endif %}
{% include "dojo/snippets/tags.html" with tags=e.tags.all %}
</td>
{% if not product_tab %}
{% if not product_tab and e.product %}
<td>
<a href="{% url 'view_product' e.product.id %}">{{ e.product }}</a>
{% include "dojo/snippets/tags.html" with tags=e.product.tags.all %}
4 changes: 2 additions & 2 deletions dojo/tools/api_sonarqube/importer.py
@@ -128,7 +128,7 @@ def import_issues(self, test):
organization=organization,
branch=test.branch_tag,
)
logging.info(
logger.info(
f'Found {len(issues)} issues for component {component["key"]}',
)

@@ -247,7 +247,7 @@ def import_hotspots(self, test):
organization=organization,
branch=test.branch_tag,
)
logging.info(
logger.info(
f'Found {len(hotspots)} hotspots for project {component["key"]}',
)
sonarUrl = client.sonar_api_url[:-3] # [:-3] removes the /api part of the sonarqube/cloud URL
112 changes: 97 additions & 15 deletions dojo/tools/mend/parser.py
@@ -4,7 +4,7 @@

from dojo.models import Finding

__author__ = "dr3dd589"
__author__ = "dr3dd589 + testaccount90009 aka SH"

logger = logging.getLogger(__name__)

@@ -35,7 +35,55 @@ def _build_common_output(node, lib_name=None):
cve = None
component_name = None
component_version = None
if "library" in node:
impact = None
description = "No Description Available"
cvss3_score = None
mitigation = "N/A"
if "component" in node:
description = (
"**Vulnerability Description**: "
+ node["vulnerability"].get("description", "No Description Available")
+ "\n\n"
+ "**Component Name**: "
+ node["component"].get("name", "")
+ "\n"
+ "**Component Type**: "
+ node["component"].get("componentType", "")
+ "\n"
+ "**Root Library**: "
+ str(node["component"].get("rootLibrary", ""))
+ "\n"
+ "**Library Type**: "
+ node["component"].get("libraryType", "")
+ "\n"
+ "**Location Found**: "
+ node["component"].get("path", "")
+ "\n"
+ "**Direct or Transitive Dependency**: "
+ node["component"].get("dependencyType", "")
+ "\n"
)
lib_name = node["component"].get("name")
component_name = node["component"].get("artifactId")
component_version = node["component"].get("version")
impact = node["component"].get("dependencyType")
cvss3_score = node["vulnerability"].get("score", None)
if "topFix" in node:
try:
topfix_node = node.get("topFix")
mitigation = (
"**Resolution**: "
+ topfix_node.get("date", "")
+ "\n"
+ topfix_node.get("message", "")
+ "\n"
+ topfix_node.get("fixResolution", "")
+ "\n"
)
except Exception:
logger.exception("Error handling topFix node.")

elif "library" in node:
node.get("project")
description = (
"**Description** : "
Expand All @@ -57,8 +105,18 @@ def _build_common_output(node, lib_name=None):
lib_name = node["library"].get("filename")
component_name = node["library"].get("artifactId")
component_version = node["library"].get("version")
cvss3_score = node.get("cvss3_score", None)
if "topFix" in node:
try:
topfix_node = node.get("topFix")
mitigation = "**Resolution** ({}): {}\n".format(
topfix_node.get("date"),
topfix_node.get("fixResolution"),
)
except Exception:
logger.exception("Error handling topFix node.")
else:
description = node.get("description")
description = node.get("description", "Unknown")

cve = node.get("name")
if cve is None:
@@ -69,27 +127,29 @@
# homogeneous behavior.
if "cvss3_severity" in node:
cvss_sev = node.get("cvss3_severity")
elif "vulnerability" in node:
cvss_sev = node["vulnerability"].get("severity")
else:
cvss_sev = node.get("severity")
severity = cvss_sev.lower().capitalize()

cvss3_score = node.get("cvss3_score", None)
cvss3_vector = node.get("scoreMetadataVector", None)
severity_justification = "CVSS v3 score: {} ({})".format(
cvss3_score if cvss3_score is not None else "N/A", cvss3_vector if cvss3_vector is not None else "N/A",
)
cwe = 1035 # default OWASP a9 until the report actually has them

mitigation = "N/A"
if "topFix" in node:
try:
topfix_node = node.get("topFix")
mitigation = "**Resolution** ({}): {}\n".format(
topfix_node.get("date"),
topfix_node.get("fixResolution"),
)
except Exception:
logger.exception("Error handling topFix node.")
# comment out the below for now - working on adding this into the above conditional statements since format can be slightly different
# mitigation = "N/A"
# if "topFix" in node:
# try:
# topfix_node = node.get("topFix")
# mitigation = "**Resolution** ({}): {}\n".format(
# topfix_node.get("date"),
# topfix_node.get("fixResolution"),
# )
# except Exception:
# logger.exception("Error handling topFix node.")

filepaths = []
if "sourceFiles" in node:
@@ -134,6 +194,7 @@ def _build_common_output(node, lib_name=None):
dynamic_finding=True,
cvssv3=cvss3_vector,
cvssv3_score=float(cvss3_score) if cvss3_score is not None else None,
impact=impact,
)
if cve:
new_finding.unsaved_vulnerability_ids = [cve]
@@ -164,8 +225,29 @@ def _build_common_output(node, lib_name=None):
for node in tree_node:
findings.append(_build_common_output(node))

elif "components" in content:
# likely a Mend Platform or 3.0 API SCA output - "library" is replaced as "component"
tree_components = content.get("components")
for comp_node in tree_components:
# get component info here, before going into vulns
if (
"response" in comp_node
and len(comp_node.get("response")) > 0
):
for vuln in comp_node.get("response"):
findings.append(
_build_common_output(vuln, comp_node.get("name")),
)

elif "response" in content:
# New schema: handle response array
tree_node = content["response"]
if tree_node:
for node in tree_node:
findings.append(_build_common_output(node))

def create_finding_key(f: Finding) -> str:
"""Hashes the finding's description and title to retrieve a key for deduplication."""
# """Hashes the finding's description and title to retrieve a key for deduplication."""
return hashlib.md5(
f.description.encode("utf-8")
+ f.title.encode("utf-8"),
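The new `"component"` branch in `_build_common_output` and the `"components"`/`"response"` handling above target the newer Mend Platform / 3.0 API SCA output, where findings arrive as a `components` list whose entries carry a `response` list of vulnerability nodes. A minimal sketch of that shape, inferred only from the keys the parser reads in this diff; every value is hypothetical:

```python
# Hedged sketch of a Mend Platform / 3.0 API-style payload, shaped after the keys the
# updated parser consumes (components -> response -> component / vulnerability / topFix).
mend_platform_report = {
    "components": [
        {
            "name": "example-lib-1.2.3.jar",  # passed as lib_name to _build_common_output
            "response": [
                {
                    "name": "CVE-2024-00000",  # placeholder vulnerability id
                    "vulnerability": {
                        "description": "Example vulnerability description",
                        "severity": "high",    # mapped to the Finding severity
                        "score": 7.5,          # read as cvss3_score
                    },
                    "component": {
                        "name": "example-lib",
                        "componentType": "Library",
                        "rootLibrary": True,
                        "libraryType": "java",
                        "path": "/app/libs/example-lib-1.2.3.jar",
                        "dependencyType": "Direct",  # also stored as impact
                        "artifactId": "example-lib",
                        "version": "1.2.3",
                    },
                    "topFix": {
                        "date": "2024-01-01",
                        "message": "Upgrade example-lib",
                        "fixResolution": "Upgrade to version 1.2.4",
                    },
                },
            ],
        },
    ],
}
```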
14 changes: 10 additions & 4 deletions helm/defectdojo/templates/celery-beat-deployment.yaml
@@ -10,6 +10,9 @@ metadata:
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
helm.sh/chart: {{ include "defectdojo.chart" . }}
{{- with .Values.extraLabels }}
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
replicas: {{ .Values.celery.beat.replicas }}
{{- if .Values.revisionHistoryLimit }}
@@ -28,8 +31,11 @@ spec:
defectdojo.org/subcomponent: beat
app.kubernetes.io/name: {{ include "defectdojo.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- if .Values.podLabels }}
{{- toYaml .Values.podLabels | nindent 8 }}
{{- with .Values.extraLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.podLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
annotations:
{{- with .Values.celery.beat.annotations }}
@@ -146,8 +152,8 @@ spec:
secretKeyRef:
name: {{ $fullName }}
key: DD_SECRET_KEY
{{- if .Values.extraEnv }}
{{- toYaml .Values.extraEnv | nindent 8 }}
{{- with .Values.extraEnv }}
{{- toYaml . | nindent 8 }}
{{- end }}
resources:
{{- toYaml .Values.celery.beat.resources | nindent 10 }}
14 changes: 10 additions & 4 deletions helm/defectdojo/templates/celery-worker-deployment.yaml
@@ -10,6 +10,9 @@ metadata:
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
helm.sh/chart: {{ include "defectdojo.chart" . }}
{{- with .Values.extraLabels }}
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
replicas: {{ .Values.celery.worker.replicas }}
{{- if .Values.revisionHistoryLimit }}
@@ -28,8 +31,11 @@ spec:
defectdojo.org/subcomponent: worker
app.kubernetes.io/name: {{ include "defectdojo.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- if .Values.podLabels }}
{{- toYaml .Values.podLabels | nindent 8 }}
{{- with .Values.extraLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.podLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
annotations:
{{- with .Values.celery.worker.annotations }}
@@ -141,8 +147,8 @@ spec:
secretKeyRef:
name: {{ $fullName }}
key: DD_SECRET_KEY
{{- if .Values.extraEnv }}
{{- toYaml .Values.extraEnv | nindent 8 }}
{{- with .Values.extraEnv }}
{{- toYaml . | nindent 8 }}
{{- end }}
resources:
{{- toYaml .Values.celery.worker.resources | nindent 10 }}
[Diffs for the remaining changed files were not loaded.]