Merge pull request #10604 from DefectDojo/master-into-dev/2.36.4-2.37.0-dev

Release: Merge back 2.36.4 into dev from: master-into-dev/2.36.4-2.37.0-dev
Maffooch authored Jul 22, 2024
2 parents 93268fd + 0e86398 commit d2d2ca9
Showing 8 changed files with 157 additions and 18 deletions.
7 changes: 5 additions & 2 deletions dojo/api_v2/serializers.py
@@ -2064,7 +2064,7 @@ class ImportScanSerializer(serializers.Serializer):
queryset=Endpoint.objects.all(),
required=False,
default=None,
help_text="The IP address, host name or full URL. It must be valid",
help_text="Enter the ID of an Endpoint that is associated with the target Product. New Findings will be added to that Endpoint.",
)
file = serializers.FileField(allow_empty_file=True, required=False)
product_type_name = serializers.CharField(required=False)
@@ -2331,7 +2331,10 @@ class ReImportScanSerializer(TaggitSerializer, serializers.Serializer):
choices=get_choices_sorted(), required=True,
)
endpoint_to_add = serializers.PrimaryKeyRelatedField(
queryset=Endpoint.objects.all(), default=None, required=False,
queryset=Endpoint.objects.all(),
required=False,
default=None,
help_text="Enter the ID of an Endpoint that is associated with the target Product. New Findings will be added to that Endpoint.",
)
file = serializers.FileField(allow_empty_file=True, required=False)
product_type_name = serializers.CharField(required=False)
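
The reworded help_text makes explicit that endpoint_to_add expects the primary key of an existing Endpoint on the target Product, not a hostname or URL. As a rough sketch of what that looks like from a client, assuming a reachable instance, a valid API token, an existing Engagement with ID 1, and an Endpoint with ID 42 on the same Product (all placeholder values):

import requests

DOJO_API = "https://defectdojo.example.com/api/v2"       # placeholder instance
HEADERS = {"Authorization": "Token <your-api-token>"}     # placeholder token

with open("aqua_report.json", "rb") as report:
    response = requests.post(
        f"{DOJO_API}/import-scan/",
        headers=HEADERS,
        data={
            "scan_type": "Aqua Scan",   # assumes an Aqua report is being imported
            "engagement": 1,            # ID of an existing Engagement
            "endpoint_to_add": 42,      # Endpoint ID, not an IP, hostname or URL
        },
        files={"file": report},
    )
    response.raise_for_status()

The same endpoint_to_add field now carries the identical help_text on ReImportScanSerializer below, so the reimport endpoint documents it the same way.
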
13 changes: 7 additions & 6 deletions dojo/finding/views.py
@@ -1272,17 +1272,18 @@ def close_finding(request, fid):
finding_in_group = finding.has_finding_group
# Check if there is a jira issue that needs to be updated
jira_issue_exists = finding.has_jira_issue or (finding.finding_group and finding.finding_group.has_jira_issue)
# fetch the project
jira_instance = jira_helper.get_jira_instance(finding)
jira_project = jira_helper.get_jira_project(finding)
# Only push if the finding is not in a group
if jira_issue_exists:
# Determine if any automatic sync should occur
push_to_jira = jira_helper.is_push_all_issues(finding) \
or jira_helper.get_jira_instance(finding).finding_jira_sync
# Add the closing note
if push_to_jira and not finding_in_group:
jira_helper.add_comment(finding, new_note, force_push=True)
push_to_jira = jira_helper.is_push_all_issues(finding) or jira_instance.finding_jira_sync
# Add the closing note
if (jira_project.push_notes or push_to_jira) and not finding_in_group:
jira_helper.add_comment(finding, new_note, force_push=True)
# Save the finding
finding.save(push_to_jira=(push_to_jira and not finding_in_group))

# we only push the group after saving the finding to make sure
# the updated data of the finding is pushed as part of the group
if push_to_jira and finding_in_group:
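
Beyond fetching the JIRA instance and project once up front, the functional change in close_finding is the widened condition around the closing note: a JIRA project with push_notes enabled now receives the comment even when automatic sync (push_to_jira) is off. A standalone sketch of that decision, reusing the names from the diff above rather than the actual view code:

def should_push_closing_note(jira_project, push_to_jira, finding_in_group):
    # push_to_jira reflects "push all issues" or the instance's finding_jira_sync flag;
    # push_notes is the per-project setting now checked in addition to it.
    # Grouped findings are excluded because the group is pushed separately
    # after the finding has been saved, as the context lines above note.
    return (jira_project.push_notes or push_to_jira) and not finding_in_group
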
5 changes: 3 additions & 2 deletions dojo/importers/options.py
@@ -118,10 +118,11 @@ def compress_options(self):
# Accommodate lists of fields
elif isinstance(value, list) and len(value) > 0 and isinstance(value[0], Model):
id_list = [item.id for item in value]
item_type = type(value[0])
class_name = None
# Get the actual class if available
if len(id_list) > 0:
class_name = type(id_list[0])
class_name = item_type
# Ensure we are not setting a class name as None
if class_name is type(None):
compressed_fields[field] = value
@@ -148,7 +149,7 @@ def decompress_options(self):
if class_name is type(None):
model_list = model_value
else:
model_list = [class_name.objects.get(id=model_id) for model_id in model_value]
model_list = list(class_name.objects.filter(id__in=model_value))
decompressed_fields[field] = model_list
elif isinstance(model_value, int):
# Check for SimpleLazyObject that will be user objects
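
Two fixes are bundled here. class_name used to be taken from type(id_list[0]), which is the type of the ID itself (int) rather than the model class, so decompression would try to resolve the IDs through int instead of the model; capturing type(value[0]) before the IDs are extracted corrects that. Decompression also now restores the whole list with a single filter(id__in=...) query instead of one get() per ID. A rough sketch of the round trip, using DefectDojo's Endpoint model purely as an example:

from dojo.models import Endpoint

endpoints = list(Endpoint.objects.all()[:3])

# compress: keep only the IDs plus the model class they belong to
id_list = [item.id for item in endpoints]
model_class = type(endpoints[0])                 # Endpoint, not int

# decompress: one query for the whole list instead of len(id_list) queries
restored = list(model_class.objects.filter(id__in=id_list))

One difference worth noting: filter(id__in=...) does not guarantee that the restored objects come back in the original order of the ID list, whereas the per-ID get() loop did.
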
7 changes: 4 additions & 3 deletions dojo/tools/aqua/parser.py
@@ -24,11 +24,12 @@ def get_items(self, tree, test):

for node in vulnerabilityTree:
resource = node.get("resource")
vulnerabilities = node.get("vulnerabilities")

vulnerabilities = node.get("vulnerabilities", [])
if vulnerabilities is None:
vulnerabilities = []
for vuln in vulnerabilities:
item = get_item(resource, vuln, test)
unique_key = resource.get("cpe") + vuln.get("name", "None")
unique_key = resource.get("cpe") + vuln.get("name", "None") + resource.get("path", "None")
items[unique_key] = item
elif "cves" in tree:
for cve in tree["cves"]:
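
The guard above targets reports where a resource's vulnerabilities key is present but null, which is exactly the shape of the new unittests/scans/aqua/issue_10585.json fixture further down. node.get("vulnerabilities", []) still returns None in that case, because the default only applies when the key is missing entirely, so the explicit None check is what keeps the loop from iterating over None. The unique key also gains the resource path, so findings that share a CPE and vulnerability name but live at different paths no longer overwrite each other. A minimal reproduction of the None handling:

node = {
    "resource": {"cpe": "pkg:/alpine:3.9.4:musl:1.1.20-r4", "name": "musl"},
    "vulnerabilities": None,   # key exists, value is null in the JSON
}

vulnerabilities = node.get("vulnerabilities", [])   # -> None, the default is not used
if vulnerabilities is None:
    vulnerabilities = []

for vuln in vulnerabilities:   # iterates zero times instead of raising TypeError
    pass

print(len(vulnerabilities))    # 0 -> no findings for this resource
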
8 changes: 4 additions & 4 deletions helm/defectdojo/Chart.lock
@@ -4,7 +4,7 @@ dependencies:
version: 9.19.1
- name: postgresql
repository: https://charts.bitnami.com/bitnami
version: 15.5.16
version: 15.5.17
- name: postgresql-ha
repository: https://charts.bitnami.com/bitnami
version: 9.4.11
@@ -13,6 +13,6 @@ dependencies:
version: 14.4.6
- name: redis
repository: https://charts.bitnami.com/bitnami
version: 19.6.1
digest: sha256:87e072ffd03b4de3bec0a96f880de20b2d0e89673295579420d233f3d6659686
generated: "2024-07-15T18:17:40.021560047Z"
version: 19.6.2
digest: sha256:797914213a13f15ef96ce0ba1ff8f8e3b3025f6f548a732cb845650f01ccdefa
generated: "2024-07-22T15:50:25.70755734Z"
2 changes: 1 addition & 1 deletion helm/defectdojo/Chart.yaml
@@ -2,7 +2,7 @@ apiVersion: v2
appVersion: "2.37.0-dev"
description: A Helm chart for Kubernetes to install DefectDojo
name: defectdojo
version: 1.6.141-dev
version: 1.6.142-dev
icon: https://www.defectdojo.org/img/favicon.ico
maintainers:
- name: madchap
127 changes: 127 additions & 0 deletions unittests/scans/aqua/issue_10585.json
@@ -0,0 +1,127 @@
{
"image": "your_image:latest",
"scan_started": {
"seconds": 1567784942,
"nanos": 28041437
},
"scan_duration": 25,
"image_size": 565733981,
"digest": "54bc57e4e876533bc61ba7bf229f0f9f96d137b787614c3d0d5c70c3578fe867",
"os": "alpine",
"version": "3.9.4",
"resources": [
{
"resource": {
"format": "apk",
"name": "musl",
"version": "1.1.20-r4",
"arch": "x86_64",
"cpe": "pkg:/alpine:3.9.4:musl:1.1.20-r4",
"license": "MIT"
},
"scanned": true,
"vulnerabilities": null
}
],
"image_assurance_results": {
"disallowed": true,
"audit_required": true,
"policy_failures": [
{
"policy_id": 1,
"policy_name": "Default",
"blocking": true,
"controls": [
"max_severity"
]
},
{
"policy_id": 6,
"policy_name": "Assurance_policy",
"blocking": true,
"controls": [
"max_score"
]
}
],
"checks_performed": [
{
"failed": true,
"policy_id": 1,
"policy_name": "Default",
"control": "max_severity",
"maximum_severity_allowed": "high",
"maximum_severity_found": "high",
"maximum_fixable_severity_found": "high",
"no_fix_excluded": true
},
{
"policy_id": 1,
"policy_name": "Default",
"control": "malware"
},
{
"policy_id": 1,
"policy_name": "Default",
"control": "sensitive_data"
},
{
"policy_id": 1,
"policy_name": "Default",
"control": "root_user"
},
{
"failed": true,
"policy_id": 6,
"policy_name": "Assurance_policy",
"control": "max_score",
"maximum_score_allowed": 7,
"maximum_score_found": 7.5,
"maximum_fixable_score_found": 7.5,
"no_fix_excluded": true
},
{
"policy_id": 6,
"policy_name": "Assurance_policy",
"control": "malware"
},
{
"policy_id": 6,
"policy_name": "Assurance_policy",
"control": "sensitive_data"
},
{
"policy_id": 6,
"policy_name": "Assurance_policy",
"control": "root_user"
}
],
"block_required": true
},
"vulnerability_summary": {
"total": 24,
"high": 5,
"medium": 18,
"low": 1,
"negligible": 0,
"sensitive": 0,
"malware": 0,
"score_average": 5.454168,
"max_score": 7.5,
"max_fixable_score": 7.5,
"max_fixable_severity": "high"
},
"scan_options": {
"scan_sensitive_data": true,
"scan_malware": true,
"scan_timeout": 3600000000000,
"manual_pull_fallback": true,
"save_adhoc_scans": true
},
"initiating_user": "chk",
"data_date": 1567724137,
"pull_name": "your_image:latest",
"changed_result": false,
"required_image_platform": "amd64:::",
"scanned_image_platform": "amd64::linux:"
}
6 changes: 6 additions & 0 deletions unittests/tools/test_aqua_parser.py
@@ -91,3 +91,9 @@ def test_aqua_parser_for_aqua_severity(self):
self.assertEqual(2, d["Medium"])
self.assertEqual(2, d["Low"])
self.assertEqual(7, d["Info"])

def test_aqua_parser_issue_10585(self):
with open("unittests/scans/aqua/issue_10585.json") as testfile:
parser = AquaParser()
findings = parser.get_findings(testfile, Test())
self.assertEqual(0, len(findings))
