Commit

Merge branch 'bugfix' into fix_kubescape

manuel-sommer authored Nov 15, 2024
2 parents 8081806 + cf452c8 commit d10357c
Showing 9 changed files with 711 additions and 8 deletions.
8 changes: 4 additions & 4 deletions docs/content/en/usage/features.md
@@ -244,7 +244,7 @@ The environment variable will override the settings in `settings.dist.py`, repla

The available algorithms are:

- DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL
+ DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `unique_id_from_tool`)
: The deduplication occurs based on
finding.unique_id_from_tool, which is a unique technical
id existing in the source tool. Few scanners populate this
@@ -266,12 +266,12 @@ DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL
able to recognise that findings found in previous
scans are actually the same as the new findings.

- DEDUPE_ALGO_HASH_CODE
+ DEDUPE_ALGO_HASH_CODE (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `hash_code`)
: The deduplication occurs based on finding.hash_code. The
hash_code itself is configurable for each scanner in
parameter `HASHCODE_FIELDS_PER_SCANNER`.

- DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE
+ DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `unique_id_from_tool_or_hash_code`)
: A finding is a duplicate of another if they have the same
unique_id_from_tool OR the same hash_code.

@@ -284,7 +284,7 @@ DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE
cross-parser deduplication


- DEDUPE_ALGO_LEGACY
+ DEDUPE_ALGO_LEGACY (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `legacy`)
: This is the algorithm that was in place before the
configuration per parser was made possible, and it remains
the default for backward compatibility reasons.
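
For illustration, a minimal sketch of how one of these values could be supplied per parser through the environment (the scan type "ZAP Scan" is a hypothetical example; use the scan type names of your own parsers):

```
DD_DEDUPLICATION_ALGORITHM_PER_PARSER='{"ZAP Scan": "unique_id_from_tool_or_hash_code"}'
```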
2 changes: 1 addition & 1 deletion dojo/settings/.settings.dist.py.sha256sum
@@ -1 +1 @@
- f6cfd7d4048275a8c0af62d3be5957527c6d0d06d5b0a7d89d8c3ec11faffa2d
+ f09caa2d4e41f44b7cd6ecf2f1400817d4776e703bd039c8d857f1356382e1f3
17 changes: 17 additions & 0 deletions dojo/settings/settings.dist.py
@@ -1296,6 +1296,12 @@ def saml2_attrib_map_format(dict):
if len(env("DD_HASHCODE_FIELDS_PER_SCANNER")) > 0:
env_hashcode_fields_per_scanner = json.loads(env("DD_HASHCODE_FIELDS_PER_SCANNER"))
for key, value in env_hashcode_fields_per_scanner.items():
if not isinstance(value, list):
msg = f"Fields definition '{value}' for hashcode calculation of '{key}' is not valid. It needs to be list of strings but it is {type(value)}."
raise TypeError(msg)
if not all(isinstance(field, str) for field in value):
msg = f"Fields for hashcode calculation for {key} are not valid. It needs to be list of strings. Some of fields are not string."
raise AttributeError(msg)
if key in HASHCODE_FIELDS_PER_SCANNER:
logger.info(f"Replacing {key} with value {value} (previously set to {HASHCODE_FIELDS_PER_SCANNER[key]}) from env var DD_HASHCODE_FIELDS_PER_SCANNER")
HASHCODE_FIELDS_PER_SCANNER[key] = value
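
For reference, a hedged sketch of values that would pass or trip the new checks once parsed by the loop above (the scan type and field names are illustrative only):

```python
import json

# Candidate values for DD_HASHCODE_FIELDS_PER_SCANNER, as the loop above would see them:
json.loads('{"ZAP Scan": ["title", "cwe", "severity"]}')  # list of strings -> accepted
json.loads('{"ZAP Scan": "title,cwe,severity"}')          # str, not a list -> loop raises TypeError
json.loads('{"ZAP Scan": ["title", 79]}')                 # 79 is not a str -> loop raises AttributeError
```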
@@ -1377,6 +1383,13 @@ def saml2_attrib_map_format(dict):
# Makes it possible to deduplicate on a technical id (same parser) and also on some functional fields (cross-parsers deduplication)
DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE = "unique_id_from_tool_or_hash_code"

+ DEDUPE_ALGOS = [
+     DEDUPE_ALGO_LEGACY,
+     DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL,
+     DEDUPE_ALGO_HASH_CODE,
+     DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE,
+ ]

# Allows to deduplicate with endpoints if endpoints is not included in the hashcode.
# Possible values are: scheme, host, port, path, query, fragment, userinfo, and user. For a details description see https://hyperlink.readthedocs.io/en/latest/api.html#attributes.
# Example:
@@ -1526,6 +1539,9 @@ def saml2_attrib_map_format(dict):
if len(env("DD_DEDUPLICATION_ALGORITHM_PER_PARSER")) > 0:
env_dedup_algorithm_per_parser = json.loads(env("DD_DEDUPLICATION_ALGORITHM_PER_PARSER"))
for key, value in env_dedup_algorithm_per_parser.items():
if value not in DEDUPE_ALGOS:
msg = f"DEDUP algorithm '{value}' for '{key}' is not valid. Use one of following values: {', '.join(DEDUPE_ALGOS)}"
raise AttributeError(msg)
if key in DEDUPLICATION_ALGORITHM_PER_PARSER:
logger.info(f"Replacing {key} with value {value} (previously set to {DEDUPLICATION_ALGORITHM_PER_PARSER[key]}) from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER")
DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value
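
Analogously, a short sketch of what the new algorithm check accepts and rejects (the scan type name is illustrative):

```python
import json

json.loads('{"ZAP Scan": "hash_code"}')  # "hash_code" is in DEDUPE_ALGOS -> accepted by the loop above
json.loads('{"ZAP Scan": "sha256"}')     # unknown algorithm -> loop raises AttributeError
```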
@@ -1751,6 +1767,7 @@ def saml2_attrib_map_format(dict):
"CWE": "https://cwe.mitre.org/data/definitions/&&.html", # e.g. https://cwe.mitre.org/data/definitions/79.html
"TEMP": "https://security-tracker.debian.org/tracker/", # e.g. https://security-tracker.debian.org/tracker/TEMP-0841856-B18BAF
"DSA": "https://security-tracker.debian.org/tracker/", # e.g. https://security-tracker.debian.org/tracker/DSA-5791-1
"RLSA": "https://errata.rockylinux.org/", # e.g. https://errata.rockylinux.org/RLSA-2024:7001
}
# List of acceptable file types that can be uploaded to a given object via arbitrary file upload
FILE_UPLOAD_TYPES = env("DD_FILE_UPLOAD_TYPES")
2 changes: 1 addition & 1 deletion dojo/tools/bearer_cli/parser.py
@@ -33,7 +33,7 @@ def get_findings(self, file, test):
finding = Finding(
    title=bearerfinding["title"] + " in " + bearerfinding["filename"] + ":" + str(bearerfinding["line_number"]),
    test=test,
-    description=bearerfinding["description"] + "\n Detected code snippet: \n" + bearerfinding["snippet"],
+    description=bearerfinding["description"] + "\n Detected code snippet: \n" + bearerfinding.get("snippet", bearerfinding.get("code_extract")),
    severity=severity,
    cwe=bearerfinding["cwe_ids"][0],
    static_finding=True,
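
A minimal sketch of how the new fallback behaves, assuming a Bearer report entry that only carries `code_extract` (the sample finding below is hypothetical):

```python
bearerfinding = {
    "description": "Hard-coded secret detected",
    "code_extract": "api_key = 'abc123'",  # some reports appear to use this key instead of "snippet"
}

# .get("snippet", ...) falls back to "code_extract" when "snippet" is absent.
# Note: if neither key were present, the fallback would be None and the string
# concatenation in the parser would raise a TypeError.
snippet = bearerfinding.get("snippet", bearerfinding.get("code_extract"))
print(snippet)  # api_key = 'abc123'
```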
6 changes: 5 additions & 1 deletion dojo/tools/trivy_operator/parser.py
@@ -20,7 +20,6 @@ def get_description_for_scan_types(self, scan_type):

    def get_findings(self, scan_file, test):
        scan_data = scan_file.read()
-
        try:
            data = json.loads(str(scan_data, "utf-8"))
        except Exception:
@@ -29,6 +28,11 @@ def get_findings(self, scan_file, test):
        if type(data) is list:
            for listitems in data:
                findings += self.output_findings(listitems, test)
+        elif type(data) is dict and bool(set(data.keys()) & {"clustercompliancereports.aquasecurity.github.io", "clusterconfigauditreports.aquasecurity.github.io", "clusterinfraassessmentreports.aquasecurity.github.io", "clusterrbacassessmentreports.aquasecurity.github.io", "configauditreports.aquasecurity.github.io", "exposedsecretreports.aquasecurity.github.io", "infraassessmentreports.aquasecurity.github.io", "rbacassessmentreports.aquasecurity.github.io", "vulnerabilityreports.aquasecurity.github.io"}):
+            for datakey in list(data.keys()):
+                if datakey not in ["clustersbomreports.aquasecurity.github.io", "sbomreports.aquasecurity.github.io"]:
+                    for listitems in data[datakey]:
+                        findings += self.output_findings(listitems, test)
        else:
            findings += self.output_findings(data, test)
        return findings
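
For orientation, a hedged sketch of the three input shapes get_findings now handles (the payloads are stand-ins, not real trivy-operator output):

```python
# 1. A list of reports: each item is parsed on its own.
data = [{"report": {"vulnerabilities": []}}, {"report": {"checks": []}}]

# 2. A dict keyed by aquasecurity.github.io resource names: every key's list
#    is parsed, except the two SBOM report keys, which are skipped.
data = {
    "vulnerabilityreports.aquasecurity.github.io": [{"report": {"vulnerabilities": []}}],
    "sbomreports.aquasecurity.github.io": [{"report": {"components": {}}}],  # ignored
}

# 3. Any other dict is treated as a single report.
data = {"report": {"vulnerabilities": []}}
```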
2 changes: 2 additions & 0 deletions helm/defectdojo/templates/initializer-job.yaml
@@ -15,7 +15,9 @@ metadata:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
+  {{- if and (int .Values.initializer.keepSeconds) (gt (int .Values.initializer.keepSeconds) 0) }}
  ttlSecondsAfterFinished: {{ .Values.initializer.keepSeconds }}
+  {{- end }}
template:
metadata:
labels:
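
A short values.yaml sketch of the two behaviors the new guard enables (the numbers are illustrative):

```yaml
initializer:
  keepSeconds: 60   # > 0: ttlSecondsAfterFinished is rendered; the Job is cleaned up after 60s
  # keepSeconds: 0  # 0 or any non-positive value: the field is omitted and the Job stays deployed
```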
2 changes: 1 addition & 1 deletion helm/defectdojo/values.yaml
@@ -321,7 +321,7 @@ initializer:
jobAnnotations: {}
annotations: {}
labels: {}
- keepSeconds: 60
+ keepSeconds: 60 # A positive integer will keep this Job and Pod deployed for the specified number of seconds, after which they will be removed. For all other values, the Job and Pod will remain deployed.
affinity: {}
nodeSelector: {}
resources: