Ruff: Add more PLW
kiblik committed Sep 17, 2024
1 parent 22a0ffe commit 4448dd5
Showing 14 changed files with 48 additions and 36 deletions.
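
Every Python change below fixes Ruff's PLW2901 (redefined-loop-name) warning: a for-loop or with-statement target that is reassigned inside its own block, silently discarding the original binding. A minimal sketch of the pattern and its fix, using hypothetical names rather than code from this commit:

    # Flagged by PLW2901: the loop target "value" is overwritten,
    # so the original element is lost for the rest of the iteration.
    for key, value in items:
        value = value.strip()
        handle(key, value)

    # Fix: bind the cleaned result to a fresh name, as the changes
    # in this commit do, and leave the loop target untouched.
    for key, orig_value in items:
        value = orig_value.strip()
        handle(key, value)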
4 changes: 2 additions & 2 deletions dojo/endpoint/views.py
@@ -325,12 +325,12 @@ def edit_meta_data(request, eid):
     endpoint = Endpoint.objects.get(id=eid)
 
     if request.method == "POST":
-        for key, value in request.POST.items():
+        for key, orig_value in request.POST.items():
             if key.startswith("cfv_"):
                 cfv_id = int(key.split("_")[1])
                 cfv = get_object_or_404(DojoMeta, id=cfv_id)
 
-                value = value.strip()
+                value = orig_value.strip()
                 if value:
                     cfv.value = value
                     cfv.save()
6 changes: 4 additions & 2 deletions dojo/forms.py
@@ -2379,8 +2379,10 @@ def get_jira_issue_template_dir_choices():
 
         for dirname in dirnames:
             if base_dir.startswith(settings.TEMPLATE_DIR_PREFIX):
-                base_dir = base_dir[len(settings.TEMPLATE_DIR_PREFIX):]
-            template_dir_list.append((os.path.join(base_dir, dirname), dirname))
+                clean_base_dir = base_dir[len(settings.TEMPLATE_DIR_PREFIX):]
+            else:
+                clean_base_dir = base_dir
+            template_dir_list.append((os.path.join(clean_base_dir, dirname), dirname))
 
     logger.debug("templates: %s", template_dir_list)
     return template_dir_list
4 changes: 2 additions & 2 deletions dojo/importers/default_importer.py
@@ -155,9 +155,9 @@ def process_findings(
         logger.debug("starting import of %i parsed findings.", len(parsed_findings) if parsed_findings else 0)
         group_names_to_findings_dict = {}
 
-        for unsaved_finding in parsed_findings:
+        for non_clean_unsaved_finding in parsed_findings:
             # make sure the severity is something is digestible
-            unsaved_finding = self.sanitize_severity(unsaved_finding)
+            unsaved_finding = self.sanitize_severity(non_clean_unsaved_finding)
             # Filter on minimum severity if applicable
             if Finding.SEVERITIES[unsaved_finding.severity] > Finding.SEVERITIES[self.minimum_severity]:
                 # finding's severity is below the configured threshold : ignoring the finding
4 changes: 2 additions & 2 deletions dojo/importers/default_reimporter.py
@@ -178,9 +178,9 @@ def process_findings(
         logger.debug("STEP 1: looping over findings from the reimported report and trying to match them to existing findings")
         deduplicationLogger.debug(f"Algorithm used for matching new findings to existing findings: {self.deduplication_algorithm}")
 
-        for unsaved_finding in parsed_findings:
+        for non_clean_unsaved_finding in parsed_findings:
             # make sure the severity is something is digestible
-            unsaved_finding = self.sanitize_severity(unsaved_finding)
+            unsaved_finding = self.sanitize_severity(non_clean_unsaved_finding)
             # Filter on minimum severity if applicable
             if Finding.SEVERITIES[unsaved_finding.severity] > Finding.SEVERITIES[self.minimum_severity]:
                 # finding's severity is below the configured threshold : ignoring the finding
4 changes: 2 additions & 2 deletions dojo/product/views.py
@@ -1234,11 +1234,11 @@ def add_meta_data(request, pid):
 def edit_meta_data(request, pid):
     prod = Product.objects.get(id=pid)
     if request.method == "POST":
-        for key, value in request.POST.items():
+        for key, orig_value in request.POST.items():
             if key.startswith("cfv_"):
                 cfv_id = int(key.split("_")[1])
                 cfv = get_object_or_404(DojoMeta, id=cfv_id)
-                value = value.strip()
+                value = orig_value.strip()
                 if value:
                     cfv.value = value
                     cfv.save()
8 changes: 4 additions & 4 deletions dojo/search/views.py
@@ -500,15 +500,15 @@ def apply_tag_filters(qs, operators, skip_relations=False):
 
     # negative search based on not- prefix (not-tags, not-test-tags, not-engagement-tags, not-product-tags, etc)
 
-    for tag_filter in tag_filters:
-        tag_filter = "not-" + tag_filter
+    for base_tag_filter in tag_filters:
+        tag_filter = "not-" + base_tag_filter
         if tag_filter in operators:
             value = operators[tag_filter]
             value = ",".join(value)  # contains needs a single value
             qs = qs.exclude(**{"{}tags__name__contains".format(tag_filters[tag_filter.replace("not-", "")]): value})
 
-    for tag_filter in tag_filters:
-        tag_filter = "not-" + tag_filter
+    for base_tag_filter in tag_filters:
+        tag_filter = "not-" + base_tag_filter
         if tag_filter + "s" in operators:
             value = operators[tag_filter + "s"]
             qs = qs.exclude(**{"{}tags__name__in".format(tag_filters[tag_filter.replace("not-", "")]): value})
8 changes: 4 additions & 4 deletions dojo/tools/hcl_appscan/parser.py
@@ -102,7 +102,7 @@ def get_findings(self, file, test):
                     case "port":
                         port = self.xmltreehelper(item)
                         description = description + "Port:" + port + "\n"
-            finding = Finding(
+            prepared_finding = Finding(
                 title=title,
                 description=description,
                 severity=severity,
@@ -111,11 +111,11 @@ def get_findings(self, file, test):
                 dynamic_finding=True,
                 static_finding=False,
             )
-            findings.append(finding)
+            findings.append(prepared_finding)
             try:
-                finding.unsaved_endpoints = []
+                prepared_finding.unsaved_endpoints = []
                 endpoint = Endpoint(host=host, port=port)
-                finding.unsaved_endpoints.append(endpoint)
+                prepared_finding.unsaved_endpoints.append(endpoint)
             except UnboundLocalError:
                 pass
         return findings
4 changes: 2 additions & 2 deletions dojo/tools/intsights/parser.py
@@ -55,7 +55,7 @@ def get_findings(self, file, test):
             raise ValueError(msg)
         for alert in alerts:
             dupe_key = alert["alert_id"]
-            alert = Finding(
+            uniq_alert = Finding(
                 title=alert["title"],
                 test=test,
                 active=False if alert["status"] == "Closed" else True,
@@ -67,7 +67,7 @@ def get_findings(self, file, test):
                 dynamic_finding=True,
                 unique_id_from_tool=alert["alert_id"],
             )
-            duplicates[dupe_key] = alert
+            duplicates[dupe_key] = uniq_alert
             if dupe_key not in duplicates:
                 duplicates[dupe_key] = True
         return duplicates.values()
4 changes: 2 additions & 2 deletions dojo/tools/jfrog_xray_on_demand_binary_scan/parser.py
@@ -161,8 +161,8 @@ def get_item_set(vulnerability):
         cvss_v3 = cves[0]["cvss_v3_vector"]
         cvssv3 = CVSS3(cvss_v3).clean_vector()
 
-    for component_name, component in vulnerability.get("components", {}).items():
-        component_name, component_version = get_component_name_version(component_name)
+    for component_name_with_version, component in vulnerability.get("components", {}).items():
+        component_name, component_version = get_component_name_version(component_name_with_version)
         mitigation, impact = process_component(component)
 
         title = clean_title(vulnerability["summary"])
4 changes: 2 additions & 2 deletions dojo/tools/mobsf/parser.py
@@ -73,8 +73,8 @@ def get_findings(self, filename, test):
         if "urls" in data:
             curl = ""
             for url in data["urls"]:
-                for curl in url["urls"]:
-                    curl = f"{curl}\n"
+                for durl in url["urls"]:
+                    curl = f"{durl}\n"
 
             if curl:
                 test_description = f"{test_description}\n**URL's:**\n {curl}\n"
12 changes: 8 additions & 4 deletions dojo/tools/qualys_webapp/parser.py
@@ -365,12 +365,14 @@ def get_unique_items(
         qid = int(finding.vuln_id_from_tool)
         if qid in g_qid_list:
             index = g_qid_list.index(qid)
-            finding = get_glossary_item(
+            final_finding = get_glossary_item(
                 glossary[index], finding, is_info=True, enable_weakness=enable_weakness,
             )
+        else:
+            final_finding = finding
         if qid in ig_qid_list:
             index = ig_qid_list.index(qid)
-            findings[unique_id] = get_info_item(info_gathered[index], finding)
+            findings[unique_id] = get_info_item(info_gathered[index], final_finding)
     return findings
 
 
@@ -402,12 +404,14 @@ def get_items(
     ).items():
         if qid in g_qid_list:
            index = g_qid_list.index(qid)
-            finding = get_glossary_item(
+            final_finding = get_glossary_item(
                 glossary[index], finding, is_info=True, enable_weakness=enable_weakness,
             )
+        else:
+            final_finding = finding
         if qid in ig_qid_list:
             index = ig_qid_list.index(qid)
-            findings[qid] = get_info_item(info_gathered[index], finding)
+            findings[qid] = get_info_item(info_gathered[index], final_finding)
 
     return findings
4 changes: 2 additions & 2 deletions ruff.toml
@@ -53,11 +53,11 @@ select = [
     "G001", "G002", "G1", "G2",
     "INP",
     "RET",
-    "SLOT",
     "PIE",
     "T20",
     "Q",
     "RSE",
+    "SLOT",
     "TID",
     "TCH",
     "INT",
@@ -68,7 +68,7 @@ select = [
     "PGH",
     "PLE",
     "PLR0915",
-    "PLW15",
+    "PLW1", "PLW2", "PLW3",
     "TRY003",
     "TRY004",
     "TRY2",
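
The ruff.toml change above also moves "SLOT" into alphabetical order and widens the Pylint-warning selection from the single "PLW15" prefix to the full "PLW1", "PLW2", and "PLW3" groups. "PLW2" includes PLW2901 (redefined-loop-name), which accounts for every code change in this commit. As an illustration of another check the wider selection picks up (an assumption about rule coverage, with invented values), PLW3301 flags needlessly nested min()/max() calls:

    # Flagged by PLW3301: nested min() calls.
    lowest = min(1, min(2, 3))

    # Fix: min() takes any number of arguments.
    lowest = min(1, 2, 3)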
6 changes: 4 additions & 2 deletions unittests/test_apiv2_scan_import_options.py
@@ -32,12 +32,14 @@ def setUp(self):
     def import_zap_scan(self, upload_empty_scan=False):
         with open("tests/zap_sample.xml", encoding="utf-8") as file:
             if upload_empty_scan:
-                file = SimpleUploadedFile("zap_sample.xml", self.EMPTY_ZAP_SCAN.encode("utf-8"))
+                tested_file = SimpleUploadedFile("zap_sample.xml", self.EMPTY_ZAP_SCAN.encode("utf-8"))
+            else:
+                tested_file = file
 
             self.payload = {
                 "engagement": 1,
                 "scan_type": "ZAP Scan",
-                "file": file,
+                "file": tested_file,
             }
             test_ids = list(Test.objects.values_list("id", flat=True))
             r = self.client.post(reverse("importscan-list"), self.payload)
12 changes: 8 additions & 4 deletions unittests/test_rest_framework.py
@@ -526,8 +526,10 @@ def test_list_prefetch(self):
 
                 for value in values:
                     if not isinstance(value, int):
-                        value = value["id"]
-                    self.assertIn(value, objs["prefetch"][field])
+                        clean_value = value["id"]
+                    else:
+                        clean_value = value
+                    self.assertIn(clean_value, objs["prefetch"][field])
 
         # TODO: add schema check
 
@@ -610,12 +612,14 @@ def test_update(self):
             if key not in ["push_to_jira", "ssh", "password", "api_key"]:
                 # Convert data to sets to avoid problems with lists
                 if isinstance(value, list):
-                    value = set(value)
+                    clean_value = set(value)
+                else:
+                    clean_value = value
                 if isinstance(response.data[key], list):
                     response_data = set(response.data[key])
                 else:
                     response_data = response.data[key]
-                self.assertEqual(value, response_data)
+                self.assertEqual(clean_value, response_data)
 
         self.assertNotIn("push_to_jira", response.data)
         self.assertNotIn("ssh", response.data)
