Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

#11210 prowler_v4.py Prowler v4.5.0 changed the 'event_time' key in finding with 'time_dt' #11211

Closed
wants to merge 13 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion components/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
"metismenu": "~3.0.7",
"moment": "^2.30.1",
"morris.js": "morrisjs/morris.js",
"pdfmake": "^0.2.14",
"pdfmake": "^0.2.15",
"startbootstrap-sb-admin-2": "1.0.7"
},
"engines": {
Expand Down
532 changes: 33 additions & 499 deletions components/yarn.lock

Large diffs are not rendered by default.

7 changes: 7 additions & 0 deletions docs/content/en/getting_started/upgrading/2.41.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
title: 'Upgrading to DefectDojo Version 2.41.x'
toc_hide: true
weight: -20241104
description: No special instructions.
---
There are no special instructions for upgrading to 2.41.x. Check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.41.0) for the contents of the release.
2 changes: 1 addition & 1 deletion dojo/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa: F401

__version__ = "2.40.0"
__version__ = "2.41.0-dev"
__url__ = "https://github.com/DefectDojo/django-DefectDojo"
__docs__ = "https://documentation.defectdojo.com"
3 changes: 3 additions & 0 deletions dojo/api_v2/prefetch/prefetcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ def _build_serializers():

Returns:
dict[model, serializer]: map of model to their serializer

"""

def _is_model_serializer(obj):
Expand Down Expand Up @@ -61,6 +62,7 @@ def _find_serializer(self, field_type):

Returns:
rest_framework.serializers.ModelSerializer: The serializer if one has been found or None

"""
# If the type is represented in the map then return the serializer
if field_type in self._serializers:
Expand All @@ -80,6 +82,7 @@ def _prefetch(self, entry, fields_to_fetch):
Args:
entry (ModelInstance): Instance of a model as returned by a django queryset
field_to_fetch (list[string]): fields to prefetch

"""
for field_to_fetch in fields_to_fetch:
# Get the field from the instance
Expand Down
3 changes: 3 additions & 0 deletions dojo/api_v2/prefetch/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ def _is_many_to_many_relation(field):

Returns:
bool: true if the field is a many-to-many relationship

"""
return isinstance(field, related.ManyToManyDescriptor)

Expand All @@ -27,6 +28,7 @@ def _is_one_to_one_relation(field):

Returns:
bool: true if the field is a one-to-one relationship

"""
return isinstance(field, related.ForwardManyToOneDescriptor)

Expand All @@ -38,6 +40,7 @@ def _get_prefetchable_fields(serializer):

Args:
serializer (Serializer): [description]

"""

def _is_field_prefetchable(field):
Expand Down
3 changes: 2 additions & 1 deletion dojo/jira_link/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import json
import logging
import os
from pathlib import Path
from typing import Any

import requests
Expand Down Expand Up @@ -1187,7 +1188,7 @@ def is_jira_project_valid(jira_project):
def jira_attachment(finding, jira, issue, file, jira_filename=None):
basename = file
if jira_filename is None:
basename = os.path.basename(file)
basename = Path(file).name

# Check to see if the file has been uploaded to Jira
    # TODO: JIRA: check for local existence of attachment as it currently crashes if local attachment doesn't exist
Expand Down
7 changes: 4 additions & 3 deletions dojo/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import re
import warnings
from datetime import datetime
from pathlib import Path
from uuid import uuid4

import hyperlink
Expand Down Expand Up @@ -3575,9 +3576,9 @@ class Check_List(models.Model):

@staticmethod
def get_status(pass_fail):
if pass_fail == "Pass":
if pass_fail == "Pass": # noqa: S105
return "success"
if pass_fail == "Fail":
if pass_fail == "Fail": # noqa: S105
return "danger"
return "warning"

Expand Down Expand Up @@ -3662,7 +3663,7 @@ def filename(self):
# logger.debug('path: "%s"', self.path)
if not self.path:
return None
return os.path.basename(self.path.name)
return Path(self.path.name).name

@property
def name_and_expiration_info(self):
Expand Down
3 changes: 2 additions & 1 deletion dojo/tools/aws_prowler_v3plus/prowler_v4.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@ def process_ocsf_json(self, file, test):
documentation = deserialized.get("remediation", {}).get("references", "")
documentation = str(documentation) + "\n" + str(deserialized.get("unmapped", {}).get("related_url", ""))
security_domain = deserialized.get("resources", [{}])[0].get("type", "")
timestamp = deserialized.get("event_time")
# Prowler v4.5.0 changed 'event_time' key in report with 'time_dt'
timestamp = deserialized.get("time_dt") or deserialized.get("event_time")
resource_arn = deserialized.get("resources", [{}])[0].get("uid", "")
resource_id = deserialized.get("resources", [{}])[0].get("name", "")
unique_id_from_tool = deserialized.get("finding_info", {}).get("uid", "")
Expand Down
2 changes: 1 addition & 1 deletion dojo/tools/contrast/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def get_findings(self, filename, test):
finding.unsaved_endpoints = []
if row.get("Request URI"):
endpoint = Endpoint(
host="0.0.0.0",
host="0.0.0.0", # noqa: S104
path=row.get("Request URI"),
protocol=row.get("Request Protocol"),
)
Expand Down
2 changes: 2 additions & 0 deletions dojo/tools/intsights/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,11 @@ def get_description_for_scan_types(self, scan_type):
def _build_finding_description(self, alert: dict) -> str:
"""
Builds an IntSights Finding description from various pieces of information.

Args:
alert: The parsed alert dictionary
Returns: A markdown formatted description

"""
return "\n".join(
[
Expand Down
3 changes: 3 additions & 0 deletions dojo/tools/qualys/csv_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ def parse_csv(csv_file) -> [Finding]:
def get_report_findings(csv_reader) -> [dict]:
"""
Filters out the unneeded information at the beginning of the Qualys CSV report.

Args:
csv_reader:

Expand All @@ -55,11 +56,13 @@ def _extract_cvss_vectors(cvss_base, cvss_temporal):

This is done because the raw values come with additional characters that cannot be parsed with the cvss library.
Example: 6.7 (AV:L/AC:L/PR:H/UI:N/S:U/C:H/I:H/A:H)

Args:
cvss_base:
cvss_temporal:
Returns:
A CVSS3 Vector including both Base and Temporal if available

"""
vector_pattern = r"^\d{1,2}.\d \((.*)\)"
cvss_vector = "CVSS:3.0/"
Expand Down
2 changes: 1 addition & 1 deletion dojo/tools/trivy/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
SECRET_DESCRIPTION_TEMPLATE = """{title}
**Category:** {category}
**Match:** {match}
"""
""" # noqa: S105

LICENSE_DESCRIPTION_TEMPLATE = """{title}
**Category:** {category}
Expand Down
2 changes: 1 addition & 1 deletion dojo/tools/trivy_operator/secrets_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
SECRET_DESCRIPTION_TEMPLATE = """{title}
**Category:** {category}
**Match:** {match}
"""
""" # noqa: S105


class TrivySecretsHandler:
Expand Down
11 changes: 11 additions & 0 deletions dojo/tools/whitehat_sentinel/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,9 +57,11 @@ def _convert_whitehat_severity_id_to_dojo_severity(
) -> str | None:
"""
Converts a WhiteHat Sentinel numerical severity to a DefectDojo severity.

Args:
whitehat_severity_id: The WhiteHat Severity ID (called risk_id in the API)
Returns: A DefectDojo severity if a mapping can be found; otherwise a null value is returned

"""
severities = [
"Informational",
Expand All @@ -79,9 +81,11 @@ def _convert_whitehat_severity_id_to_dojo_severity(
def _parse_cwe_from_tags(self, whitehat_sentinel_tags) -> str:
"""
Some Vulns include the CWE ID as a tag. This is used to pull it out of that list and return only the ID.

Args:
whitehat_sentinel_tags: The Tags list from the WhiteHat vuln
Returns: The first CWE ID in the list, if it exists

"""
for tag in whitehat_sentinel_tags:
if tag.startswith("CWE-"):
Expand All @@ -91,9 +95,11 @@ def _parse_cwe_from_tags(self, whitehat_sentinel_tags) -> str:
def _parse_description(self, whitehat_sentinel_description: dict):
"""
Manually converts the HTML description to a DefectDojo-friendly format.

Args:
whitehat_sentinel_description: The description section of the WhiteHat Sentinel vulnerability dict
Returns: A dict with description and reference link

"""
description_ref = {"description": "", "reference_link": ""}

Expand Down Expand Up @@ -144,9 +150,11 @@ def _parse_solution(self, whitehat_sentinel_vuln_solution):
def __get_href_url(self, text_to_search):
"""
Searches for the anchor targets within a string that includes an anchor tag.

Args:
text_to_search: The text string to search for an anchor tag
Returns:

"""
links = ""

Expand All @@ -157,9 +165,11 @@ def __get_href_url(self, text_to_search):
def __remove_paragraph_tags(self, html_string):
"""
Manually remove <p> tags from HTML strings to avoid importing yet-another-library.

Args:
    html_string: The HTML string to remove <p> </p> tags from
    Returns: The original string stripped of paragraph tags

"""
return re.sub(r"<p>|</p>", "", html_string)

Expand Down Expand Up @@ -193,6 +203,7 @@ def _convert_whitehat_sentinel_vulns_to_dojo_finding(
whitehat_sentinel_vulns: The vuln dictionary from WhiteHat Sentinel vuln API
test: The test ID that the DefectDojo finding should be associated with
Returns: A DefectDojo Finding object

"""
dupes = {}

Expand Down
5 changes: 4 additions & 1 deletion dojo/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from collections.abc import Callable
from datetime import date, datetime, timedelta
from math import pi, sqrt
from pathlib import Path

import bleach
import crum
Expand Down Expand Up @@ -88,6 +89,7 @@ def do_false_positive_history(finding, *args, **kwargs):

Args:
finding (:model:`dojo.Finding`): Finding to be replicated

"""
to_mark_as_fp = set()

Expand Down Expand Up @@ -149,6 +151,7 @@ def match_finding_to_existing_findings(finding, product=None, engagement=None, t
product (:model:`dojo.Product`, optional): Product to filter findings by
engagement (:model:`dojo.Engagement`, optional): Engagement to filter findings by
test (:model:`dojo.Test`, optional): Test to filter findings by

"""
if product:
custom_filter_type = "product"
Expand Down Expand Up @@ -1382,7 +1385,7 @@ def handle_uploaded_threat(f, eng):
# Check if threat folder exist.
if not os.path.isdir(settings.MEDIA_ROOT + "/threat/"):
# Create the folder
os.mkdir(settings.MEDIA_ROOT + "/threat/")
Path(settings.MEDIA_ROOT + "/threat/").mkdir()
with open(settings.MEDIA_ROOT + f"/threat/{eng.id}{extension}",
"wb+") as destination:
for chunk in f.chunks():
Expand Down
5 changes: 3 additions & 2 deletions dojo/views.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import logging
import os
from pathlib import Path

from auditlog.models import LogEntry
from django.conf import settings
Expand Down Expand Up @@ -150,7 +151,7 @@ def manage_files(request, oid, obj_type):

for o in files_formset.deleted_objects:
logger.debug("removing file: %s", o.file.name)
os.remove(os.path.join(settings.MEDIA_ROOT, o.file.name))
Path(os.path.join(settings.MEDIA_ROOT, o.file.name)).unlink()

for o in files_formset.new_objects:
logger.debug("adding file: %s", o.file.name)
Expand All @@ -161,7 +162,7 @@ def manage_files(request, oid, obj_type):
finding__isnull=True)
for o in orphan_files:
logger.debug("purging orphan file: %s", o.file.name)
os.remove(os.path.join(settings.MEDIA_ROOT, o.file.name))
Path(os.path.join(settings.MEDIA_ROOT, o.file.name)).unlink()
o.delete()

messages.add_message(
Expand Down
2 changes: 1 addition & 1 deletion requirements-lint.txt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
ruff==0.7.1
ruff==0.7.2
4 changes: 2 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ django-slack==5.19.0
git+https://github.com/DefectDojo/django-tagging@develop#egg=django-tagging
django-watson==1.6.3
django-prometheus==2.3.1
Django==5.1.2
Django==5.1.3
djangorestframework==3.15.2
html2text==2024.2.26
humanize==4.11.0
Expand Down Expand Up @@ -69,7 +69,7 @@ django-ratelimit==4.1.0
argon2-cffi==23.1.0
blackduck==1.1.3
pycurl==7.45.3 # Required for Celery Broker AWS (SQS) support
boto3==1.35.53 # Required for Celery Broker AWS (SQS) support
boto3==1.35.54 # Required for Celery Broker AWS (SQS) support
netaddr==1.3.0
vulners==2.2.3
fontawesomefree==6.6.0
Expand Down
11 changes: 7 additions & 4 deletions ruff.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,11 @@ select = [
"W",
"C90",
"I",
"D2", "D3", "D403",
"D2", "D3", "D403", "D411", "D413",
"UP",
"YTT",
"ASYNC",
"S2", "S5", "S7", "S101", "S112", "S311",
"S2", "S5", "S7", "S101", "S104", "S105", "S112", "S311",
"FBT001", "FBT003",
"A003", "A004", "A006",
"COM",
Expand All @@ -65,7 +65,7 @@ select = [
"TCH",
"INT",
"ARG003", "ARG004", "ARG005",
"PTH2",
"PTH2", "PTH101", "PTH102", "PTH103", "PTH104", "PTH105", "PTH106", "PTH107", "PTH108", "PTH109", "PTH110", "PTH111", "PTH114", "PTH115", "PTH116", "PTH117", "PTH119", "PTH121", "PTH124",
"TD001", "TD004", "TD005",
"PD",
"PGH",
Expand Down Expand Up @@ -99,7 +99,10 @@ fixable = ["ALL"]
unfixable = []
preview = true

per-file-ignores = {}
[lint.per-file-ignores]
"unittests/**" = [
"S105", # hardcoded passwords in tests are fine
]

[lint.flake8-boolean-trap]
extend-allowed-calls = ["dojo.utils.get_system_setting"]
Expand Down
3 changes: 2 additions & 1 deletion tests/Import_scanner_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import shutil
import sys
import unittest
from pathlib import Path

import git
from base_test_class import BaseTestCase
Expand All @@ -23,7 +24,7 @@ def setUp(self):
self.repo_path = dir_path + "/scans"
if os.path.isdir(self.repo_path):
shutil.rmtree(self.repo_path)
os.mkdir(self.repo_path)
Path(self.repo_path).mkdir()
git.Repo.clone_from("https://github.com/DefectDojo/sample-scan-files", self.repo_path)
self.remove_items = ["__init__.py", "__init__.pyc", "factory.py", "factory.pyc",
"factory.py", "LICENSE", "README.md", ".gitignore", ".git", "__pycache__"]
Expand Down
2 changes: 1 addition & 1 deletion tests/finding_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ def check_file(self, file_name):
file_found = True
break
self.assertTrue(file_found, f"Cannot find {file_name}")
os.remove(file_name)
Path(file_name).unlink()

def test_csv_export(self):
driver = self.driver
Expand Down
Loading
Loading