diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 2a4d7ef037..0e42769cd7 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -48,6 +48,8 @@ jobs: steps: - name: Load OAS files from artifacts uses: actions/download-artifact@v4 + with: + pattern: oas-* - name: Upload Release Asset - OpenAPI Specification - YAML id: upload-release-asset-yaml diff --git a/Dockerfile.integration-tests-debian b/Dockerfile.integration-tests-debian index ae890a24c1..28c77fc976 100644 --- a/Dockerfile.integration-tests-debian +++ b/Dockerfile.integration-tests-debian @@ -1,7 +1,7 @@ # code: language=Dockerfile -FROM openapitools/openapi-generator-cli:v7.9.0@sha256:bb32f5f0c9f5bdbb7b00959e8009de0230aedc200662701f05fc244c36f967ba AS openapitools +FROM openapitools/openapi-generator-cli:v7.10.0@sha256:f2054a5a7908ad81017d0f0839514ba5eab06ae628914ff71554d46fac1bcf7a AS openapitools FROM python:3.11.9-slim-bookworm@sha256:8c1036ec919826052306dfb5286e4753ffd9d5f6c24fbc352a5399c3b405b57e AS build WORKDIR /app RUN \ diff --git a/Dockerfile.nginx-alpine b/Dockerfile.nginx-alpine index b1bd293b09..17abb7c3f8 100644 --- a/Dockerfile.nginx-alpine +++ b/Dockerfile.nginx-alpine @@ -140,7 +140,7 @@ COPY manage.py ./ COPY dojo/ ./dojo/ RUN env DD_SECRET_KEY='.' python3 manage.py collectstatic --noinput && true -FROM nginx:1.27.2-alpine@sha256:2140dad235c130ac861018a4e13a6bc8aea3a35f3a40e20c1b060d51a7efd250 +FROM nginx:1.27.2-alpine@sha256:74175cf34632e88c6cfe206897cbfe2d2fecf9bf033c40e7f9775a3689e8adc7 ARG uid=1001 ARG appuser=defectdojo COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/ diff --git a/Dockerfile.nginx-debian b/Dockerfile.nginx-debian index f818e54c7f..b062e28e10 100644 --- a/Dockerfile.nginx-debian +++ b/Dockerfile.nginx-debian @@ -73,7 +73,7 @@ COPY dojo/ ./dojo/ RUN env DD_SECRET_KEY='.' 
python3 manage.py collectstatic --noinput && true -FROM nginx:1.27.2-alpine@sha256:2140dad235c130ac861018a4e13a6bc8aea3a35f3a40e20c1b060d51a7efd250 +FROM nginx:1.27.2-alpine@sha256:74175cf34632e88c6cfe206897cbfe2d2fecf9bf033c40e7f9775a3689e8adc7 ARG uid=1001 ARG appuser=defectdojo COPY --from=collectstatic /app/static/ /usr/share/nginx/html/static/ diff --git a/components/node_modules/.gitkeep b/components/node_modules/.gitkeep deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/docker-compose.yml b/docker-compose.yml index b9ec0ac9b6..f236ae4a87 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -103,7 +103,7 @@ services: source: ./docker/extra_settings target: /app/docker/extra_settings postgres: - image: postgres:17.0-alpine@sha256:d388be15cfb665c723da47cccdc7ea5c003ed71f700c5419bbd075033227ce1f + image: postgres:17.1-alpine@sha256:0d9624535618a135c5453258fd629f4963390338b11aaffb92292c12df3a6c17 environment: POSTGRES_DB: ${DD_DATABASE_NAME:-defectdojo} POSTGRES_USER: ${DD_DATABASE_USER:-defectdojo} diff --git a/docs/content/en/open_source/archived_docs/usage/features.md b/docs/content/en/open_source/archived_docs/usage/features.md index bb4fe83cd0..c026a2f3a1 100644 --- a/docs/content/en/open_source/archived_docs/usage/features.md +++ b/docs/content/en/open_source/archived_docs/usage/features.md @@ -244,7 +244,7 @@ The environment variable will override the settings in `settings.dist.py`, repla The available algorithms are: -DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL +DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `unique_id_from_tool`) : The deduplication occurs based on finding.unique_id_from_tool which is a unique technical id existing in the source tool. Few scanners populate this @@ -266,12 +266,12 @@ DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL able to recognise that findings found in previous scans are actually the same as the new findings. -DEDUPE_ALGO_HASH_CODE +DEDUPE_ALGO_HASH_CODE (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `hash_code`) : The deduplication occurs based on finding.hash_code. The hash_code itself is configurable for each scanner in parameter `HASHCODE_FIELDS_PER_SCANNER`. -DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE +DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `unique_id_from_tool_or_hash_code`) : A finding is a duplicate with another if they have the same unique_id_from_tool OR the same hash_code. @@ -284,7 +284,7 @@ DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE cross-parser deduplication -DEDUPE_ALGO_LEGACY +DEDUPE_ALGO_LEGACY (value for `DD_DEDUPLICATION_ALGORITHM_PER_PARSER`: `legacy`) : This is algorithm that was in place before the configuration per parser was made possible, and also the default one for backward compatibility reasons. 
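For example, to override the algorithm for a single parser, set the environment variable to a JSON object mapping the parser name to one of the values above. A minimal sketch — "Acme Scanner" is a hypothetical parser name used purely for illustration:

```
# "Acme Scanner" is a placeholder; use your parser's scan type name and one of the four algorithm values
DD_DEDUPLICATION_ALGORITHM_PER_PARSER='{"Acme Scanner": "unique_id_from_tool_or_hash_code"}'
```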
diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index c9f73c97ce..de0e6a49de 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -417,6 +417,51 @@ class Meta: fields = "__all__" +class MetadataSerializer(serializers.Serializer): + name = serializers.CharField(max_length=120) + value = serializers.CharField(max_length=300) + + +class MetaMainSerializer(serializers.Serializer): + id = serializers.IntegerField(read_only=True) + + product = serializers.PrimaryKeyRelatedField( + queryset=Product.objects.all(), + required=False, + default=None, + allow_null=True, + ) + endpoint = serializers.PrimaryKeyRelatedField( + queryset=Endpoint.objects.all(), + required=False, + default=None, + allow_null=True, + ) + finding = serializers.PrimaryKeyRelatedField( + queryset=Finding.objects.all(), + required=False, + default=None, + allow_null=True, + ) + metadata = MetadataSerializer(many=True) + + def validate(self, data): + product_id = data.get("product", None) + endpoint_id = data.get("endpoint", None) + finding_id = data.get("finding", None) + metadata = data.get("metadata") + + for item in metadata: + # this will only verify that one and only one of product, endpoint, or finding is passed... + DojoMeta(product=product_id, + endpoint=endpoint_id, + finding=finding_id, + name=item.get("name"), + value=item.get("value")).clean() + + return data + + class ProductMetaSerializer(serializers.ModelSerializer): class Meta: model = DojoMeta diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index e6dd30dab4..1a9eab8641 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -1650,6 +1650,61 @@ class DojoMetaViewSet( def get_queryset(self): return get_authorized_dojo_meta(Permissions.Product_View) + @extend_schema( + methods=["post", "patch"], + request=serializers.MetaMainSerializer, + responses={status.HTTP_200_OK: serializers.MetaMainSerializer}, + filters=False, + ) + @action( + detail=False, methods=["post", "patch"], pagination_class=None, + ) + def batch(self, request, pk=None): + serialized_data = serializers.MetaMainSerializer(data=request.data) + if serialized_data.is_valid(raise_exception=True): + if request.method == "POST": + self.process_post(request.data) + if request.method == "PATCH": + self.process_patch(request.data) + + return Response(status=status.HTTP_201_CREATED, data=serialized_data.data) + + def process_post(self: object, data: dict): + product = Product.objects.filter(id=data.get("product")).first() + finding = Finding.objects.filter(id=data.get("finding")).first() + endpoint = Endpoint.objects.filter(id=data.get("endpoint")).first() + metalist = data.get("metadata") + for metadata in metalist: + try: + DojoMeta.objects.create( + product=product, + finding=finding, + endpoint=endpoint, + name=metadata.get("name"), + value=metadata.get("value"), + ) + except (IntegrityError) as ex: # this should not happen as the data was validated in the batch call + raise ValidationError(str(ex)) + + def process_patch(self: object, data: dict): + product = Product.objects.filter(id=data.get("product")).first() + finding = Finding.objects.filter(id=data.get("finding")).first() + endpoint = Endpoint.objects.filter(id=data.get("endpoint")).first() + metalist = data.get("metadata") + for metadata in metalist: + dojometa = DojoMeta.objects.filter(product=product, finding=finding, endpoint=endpoint, name=metadata.get("name")) + if dojometa: + try: + dojometa.update( + name=metadata.get("name"), + value=metadata.get("value"), + ) + except 
(IntegrityError) as ex: + raise ValidationError(str(ex)) + else: + msg = f"Metadata {metadata.get('name')} not found for object." + raise ValidationError(msg) + @extend_schema_view(**schema_with_prefetch()) class ProductViewSet( @@ -3087,6 +3142,29 @@ class QuestionnaireEngagementSurveyViewSet( def get_queryset(self): return Engagement_Survey.objects.all().order_by("id") + @extend_schema( + request=OpenApiTypes.NONE, + parameters=[ + OpenApiParameter( + "engagement_id", OpenApiTypes.INT, OpenApiParameter.PATH, + ), + ], + responses={status.HTTP_200_OK: serializers.QuestionnaireAnsweredSurveySerializer}, + ) + @action( + detail=True, methods=["post"], url_path=r"link_engagement/(?P<engagement_id>\d+)", + ) + def link_engagement(self, request, pk, engagement_id): + # Get the answered survey + engagement_survey = self.get_object() + # Safely get the engagement + engagement = get_object_or_404(Engagement.objects, pk=engagement_id) + # Link the engagement + answered_survey, _ = Answered_Survey.objects.get_or_create(engagement=engagement, survey=engagement_survey) + # Send a favorable response + serialized_answered_survey = serializers.QuestionnaireAnsweredSurveySerializer(answered_survey) + return Response(serialized_answered_survey.data) + @extend_schema_view(**schema_with_prefetch()) class QuestionnaireAnsweredSurveyViewSet( diff --git a/dojo/forms.py b/dojo/forms.py index 3577a9ff2a..d56cd1ebad 100644 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -4,6 +4,7 @@ import re import warnings from datetime import date, datetime +from pathlib import Path import tagulous from crispy_forms.bootstrap import InlineCheckboxes, InlineRadios @@ -754,7 +755,8 @@ class UploadThreatForm(forms.Form): def clean(self): if (file := self.cleaned_data.get("file", None)) is not None: - ext = os.path.splitext(file.name)[1] # [0] returns path+filename + path = Path(file.name) + ext = path.suffix valid_extensions = [".jpg", ".png", ".pdf"] if ext.lower() not in valid_extensions: if accepted_extensions := f"{', '.join(valid_extensions)}": @@ -872,7 +874,8 @@ def clean(self): for form in self.forms: file = form.cleaned_data.get("file", None) if file: - ext = os.path.splitext(file.name)[1] # [0] returns path+filename + path = Path(file.name) + ext = path.suffix valid_extensions = settings.FILE_UPLOAD_TYPES if ext.lower() not in valid_extensions: if accepted_extensions := f"{', '.join(valid_extensions)}": diff --git a/dojo/models.py b/dojo/models.py index 037b22b919..61bd6622fc 100644 --- a/dojo/models.py +++ b/dojo/models.py @@ -141,7 +141,9 @@ def __init__(self, directory=None, keep_basename=False, keep_ext=True): self.keep_ext = keep_ext def __call__(self, model_instance, filename): - base, ext = os.path.splitext(filename) + path = Path(filename) + base = path.parent / path.stem + ext = path.suffix filename = f"{base}_{uuid4()}" if self.keep_basename else str(uuid4()) if self.keep_ext: filename += ext diff --git a/dojo/settings/.settings.dist.py.sha256sum b/dojo/settings/.settings.dist.py.sha256sum index 22027cc18f..f4b9461b96 100644 --- a/dojo/settings/.settings.dist.py.sha256sum +++ b/dojo/settings/.settings.dist.py.sha256sum @@ -1 +1 @@ -fc660db6c2f55181fd8515d9b13c75197d8272c5c635235f6f60e4b1fc77af04 +01215b397651163c0403b028adb08b18fa83c4abb188b0536dfb9e43eddcd9cd diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index df4ea452cb..8c68bf8800 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1301,6 +1301,12 @@ def saml2_attrib_map_format(dict): if 
len(env("DD_HASHCODE_FIELDS_PER_SCANNER")) > 0: env_hashcode_fields_per_scanner = json.loads(env("DD_HASHCODE_FIELDS_PER_SCANNER")) for key, value in env_hashcode_fields_per_scanner.items(): + if not isinstance(value, list): + msg = f"Fields definition '{value}' for hashcode calculation of '{key}' is not valid. It needs to be list of strings but it is {type(value)}." + raise TypeError(msg) + if not all(isinstance(field, str) for field in value): + msg = f"Fields for hashcode calculation for {key} are not valid. It needs to be list of strings. Some of fields are not string." + raise AttributeError(msg) if key in HASHCODE_FIELDS_PER_SCANNER: logger.info(f"Replacing {key} with value {value} (previously set to {HASHCODE_FIELDS_PER_SCANNER[key]}) from env var DD_HASHCODE_FIELDS_PER_SCANNER") HASHCODE_FIELDS_PER_SCANNER[key] = value @@ -1382,6 +1388,13 @@ def saml2_attrib_map_format(dict): # Makes it possible to deduplicate on a technical id (same parser) and also on some functional fields (cross-parsers deduplication) DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE = "unique_id_from_tool_or_hash_code" +DEDUPE_ALGOS = [ + DEDUPE_ALGO_LEGACY, + DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, + DEDUPE_ALGO_HASH_CODE, + DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, +] + # Allows to deduplicate with endpoints if endpoints is not included in the hashcode. # Possible values are: scheme, host, port, path, query, fragment, userinfo, and user. For a details description see https://hyperlink.readthedocs.io/en/latest/api.html#attributes. # Example: @@ -1532,6 +1545,9 @@ def saml2_attrib_map_format(dict): if len(env("DD_DEDUPLICATION_ALGORITHM_PER_PARSER")) > 0: env_dedup_algorithm_per_parser = json.loads(env("DD_DEDUPLICATION_ALGORITHM_PER_PARSER")) for key, value in env_dedup_algorithm_per_parser.items(): + if value not in DEDUPE_ALGOS: + msg = f"DEDUP algorithm '{value}' for '{key}' is not valid. Use one of following values: {', '.join(DEDUPE_ALGOS)}" + raise AttributeError(msg) if key in DEDUPLICATION_ALGORITHM_PER_PARSER: logger.info(f"Replacing {key} with value {value} (previously set to {DEDUPLICATION_ALGORITHM_PER_PARSER[key]}) from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER") DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value @@ -1750,9 +1766,14 @@ def saml2_attrib_map_format(dict): "ELSA": "https://linux.oracle.com/errata/&&.html", # e.g. https://linux.oracle.com/errata/ELSA-2024-12714.html "ELBA": "https://linux.oracle.com/errata/&&.html", # e.g. https://linux.oracle.com/errata/ELBA-2024-7457.html "RXSA": "https://errata.rockylinux.org/", # e.g. https://errata.rockylinux.org/RXSA-2024:4928 + "C-": "https://hub.armosec.io/docs/", # e.g. https://hub.armosec.io/docs/c-0085 + "AVD": "https://avd.aquasec.com/misconfig/", # e.g. https://avd.aquasec.com/misconfig/avd-ksv-01010 + "KHV": "https://avd.aquasec.com/misconfig/kubernetes/", # e.g. https://avd.aquasec.com/misconfig/kubernetes/khv045 "CAPEC": "https://capec.mitre.org/data/definitions/&&.html", # e.g. https://capec.mitre.org/data/definitions/157.html "CWE": "https://cwe.mitre.org/data/definitions/&&.html", # e.g. https://cwe.mitre.org/data/definitions/79.html "TEMP": "https://security-tracker.debian.org/tracker/", # e.g. https://security-tracker.debian.org/tracker/TEMP-0841856-B18BAF + "DSA": "https://security-tracker.debian.org/tracker/", # e.g. https://security-tracker.debian.org/tracker/DSA-5791-1 + "RLSA": "https://errata.rockylinux.org/", # e.g. 
https://errata.rockylinux.org/RLSA-2024:7001 } # List of acceptable file types that can be uploaded to a given object via arbitrary file upload FILE_UPLOAD_TYPES = env("DD_FILE_UPLOAD_TYPES") diff --git a/dojo/templatetags/display_tags.py b/dojo/templatetags/display_tags.py index 3fa030d90a..cd3627d41b 100644 --- a/dojo/templatetags/display_tags.py +++ b/dojo/templatetags/display_tags.py @@ -780,6 +780,8 @@ def vulnerability_url(vulnerability_id): for key in settings.VULNERABILITY_URLS: if vulnerability_id.upper().startswith(key): + if key in ["AVD", "KHV", "C-"]: + return settings.VULNERABILITY_URLS[key] + str(vulnerability_id.lower()) if "&&" in settings.VULNERABILITY_URLS[key]: # Process specific keys specially if need if key in ["CAPEC", "CWE"]: diff --git a/dojo/tools/aws_prowler_v3plus/prowler_v4.py b/dojo/tools/aws_prowler_v3plus/prowler_v4.py index 1b9c155b63..dd543f27f5 100644 --- a/dojo/tools/aws_prowler_v3plus/prowler_v4.py +++ b/dojo/tools/aws_prowler_v3plus/prowler_v4.py @@ -37,7 +37,8 @@ def process_ocsf_json(self, file, test): documentation = deserialized.get("remediation", {}).get("references", "") documentation = str(documentation) + "\n" + str(deserialized.get("unmapped", {}).get("related_url", "")) security_domain = deserialized.get("resources", [{}])[0].get("type", "") - timestamp = deserialized.get("event_time") + # Prowler v4.5.0 replaced the 'event_time' key in the report with 'time_dt' + timestamp = deserialized.get("time_dt") or deserialized.get("event_time") resource_arn = deserialized.get("resources", [{}])[0].get("uid", "") resource_id = deserialized.get("resources", [{}])[0].get("name", "") unique_id_from_tool = deserialized.get("finding_info", {}).get("uid", "") diff --git a/dojo/tools/bearer_cli/parser.py b/dojo/tools/bearer_cli/parser.py index a7b4eba4f2..0c72db3877 100644 --- a/dojo/tools/bearer_cli/parser.py +++ b/dojo/tools/bearer_cli/parser.py @@ -33,7 +33,7 @@ def get_findings(self, file, test): finding = Finding( title=bearerfinding["title"] + " in " + bearerfinding["filename"] + ":" + str(bearerfinding["line_number"]), test=test, - description=bearerfinding["description"] + "\n Detected code snippet: \n" + bearerfinding["snippet"], + description=bearerfinding["description"] + "\n Detected code snippet: \n" + bearerfinding.get("snippet", bearerfinding.get("code_extract")), severity=severity, cwe=bearerfinding["cwe_ids"][0], static_finding=True, diff --git a/dojo/tools/kubescape/parser.py b/dojo/tools/kubescape/parser.py index c371f47790..0f472a902e 100644 --- a/dojo/tools/kubescape/parser.py +++ b/dojo/tools/kubescape/parser.py @@ -59,65 +59,65 @@ def get_findings(self, filename, test): controls = results[0].get("controls", []) for control in controls: - # This condition is true if the result doesn't contain the status for each control (old format) - retrocompatibility_condition = "status" not in control or "status" not in control["status"] - if retrocompatibility_condition or control["status"]["status"] == "failed": - control_name = control["name"] - if resource_type and resource_name and control_name: - title = f"{control_name} - {resource_type} {resource_name}" - else: - title = f"{control_name} - {resourceid}" - controlID = control["controlID"] - - # Find control details - controlSummary = self.find_control_summary_by_id(data, controlID) - if controlSummary is None: - severity = "Info" - mitigation = "" - else: - severity = self.severity_mapper(controlSummary.get("scoreFactor", 0)) - # Define mitigation if available - if "mitigation" in controlSummary: 
mitigation = controlSummary["mitigation"] + for rule in control["rules"]: + if rule["status"] == "passed": + continue + # This condition is true if the result doesn't contain the status for each control (old format) + retrocompatibility_condition = "status" not in control or "status" not in control["status"] + if retrocompatibility_condition or control["status"]["status"] == "failed": + control_name = control["name"] + if resource_type and resource_name and control_name: + title = f"{control_name} - {resource_type} {resource_name}" else: - mitigation = "" + title = f"{control_name} - {resourceid}" + controlID = control["controlID"] - armoLink = f"https://hub.armosec.io/docs/{controlID.lower()}" - description = "**Summary:** " + f"The ressource '{resourceid}' has failed the control '{control_name}'." + "\n" - if controlSummary is not None and "description" in controlSummary: - description += "**Description:** " + controlSummary["description"] + "\n" - - # Define category if available - if controlSummary is not None and "category" in controlSummary and "subCategory" in controlSummary["category"]: - category_name = controlSummary["category"]["name"] - category_subname = controlSummary["category"]["subCategory"]["name"] - category = f"{category_name} > {category_subname}" - description += "**Category:** " + category + "\n" - elif controlSummary is not None and "category" in controlSummary and "name" in controlSummary["category"]: - category = controlSummary["category"]["name"] - description += "**Category:** " + category + "\n" - - description += "View control details here: " + self.__hyperlink(armoLink) - - steps_to_reproduce = "The following rules have failed :" + "\n" - steps_to_reproduce += "\t**Rules:** " + str(json.dumps(control["rules"], indent=4)) + "\n" - - steps_to_reproduce += "Resource object may contain evidence:" + "\n" - steps_to_reproduce += "\t**Resource object:** " + str(json.dumps(resource["object"], indent=4)) - - references = armoLink - - find = Finding( - title=textwrap.shorten(title, 150), - test=test, - description=description, - mitigation=mitigation, - steps_to_reproduce=steps_to_reproduce, - references=references, - severity=severity, - component_name=resourceid, - static_finding=True, - dynamic_finding=False, - ) - findings.append(find) + # Find control details + controlSummary = self.find_control_summary_by_id(data, controlID) + if controlSummary is None: + severity = "Info" + mitigation = "" + else: + severity = self.severity_mapper(controlSummary.get("scoreFactor", 0)) + # Define mitigation if available + if "mitigation" in controlSummary: + mitigation = controlSummary["mitigation"] + else: + mitigation = "" + + description = "**Summary:** " + f"The resource '{resourceid}' has failed the control '{control_name}'." 
+ "\n" + if controlSummary is not None and "description" in controlSummary: + description += "**Description:** " + controlSummary["description"] + "\n" + + # Define category if available + if controlSummary is not None and "category" in controlSummary and "subCategory" in controlSummary["category"]: + category_name = controlSummary["category"]["name"] + category_subname = controlSummary["category"]["subCategory"]["name"] + category = f"{category_name} > {category_subname}" + description += "**Category:** " + category + "\n" + elif controlSummary is not None and "category" in controlSummary and "name" in controlSummary["category"]: + category = controlSummary["category"]["name"] + description += "**Category:** " + category + "\n" + + steps_to_reproduce = "The following rules have failed :" + "\n" + steps_to_reproduce += "\t**Rules:** " + str(json.dumps(control["rules"], indent=4)) + "\n" + steps_to_reproduce += "Resource object may contain evidence:" + "\n" + steps_to_reproduce += "\t**Resource object:** " + str(json.dumps(resource["object"], indent=4)) + + find = Finding( + title=textwrap.shorten(title, 150), + test=test, + description=description, + mitigation=mitigation, + steps_to_reproduce=steps_to_reproduce, + references=f"https://hub.armosec.io/docs/{controlID.lower()}", + severity=severity, + component_name=resourceid, + static_finding=True, + dynamic_finding=False, + ) + if controlID is not None: + find.unsaved_vulnerability_ids = [] + find.unsaved_vulnerability_ids.append(controlID) + findings.append(find) return findings diff --git a/dojo/tools/trivy_operator/checks_handler.py b/dojo/tools/trivy_operator/checks_handler.py index c42eef0fa8..2a260ff568 100644 --- a/dojo/tools/trivy_operator/checks_handler.py +++ b/dojo/tools/trivy_operator/checks_handler.py @@ -1,4 +1,5 @@ from dojo.models import Finding +from dojo.tools.trivy_operator.uniform_vulnid import UniformTrivyVulnID TRIVY_SEVERITIES = { "CRITICAL": "Critical", @@ -47,6 +48,6 @@ def handle_checks(self, labels, checks, test): tags=[resource_namespace], ) if check_id: - finding.unsaved_vulnerability_ids = [check_id] + finding.unsaved_vulnerability_ids = [UniformTrivyVulnID().return_uniformed_vulnid(check_id)] findings.append(finding) return findings diff --git a/dojo/tools/trivy_operator/compliance_handler.py b/dojo/tools/trivy_operator/compliance_handler.py index 9e27c56ddf..62a63929e2 100644 --- a/dojo/tools/trivy_operator/compliance_handler.py +++ b/dojo/tools/trivy_operator/compliance_handler.py @@ -1,4 +1,5 @@ from dojo.models import Finding +from dojo.tools.trivy_operator.uniform_vulnid import UniformTrivyVulnID TRIVY_SEVERITIES = { "CRITICAL": "Critical", @@ -54,6 +55,6 @@ def handle_compliance(self, benchmarkreport, test): dynamic_finding=True, ) if check_checkID: - finding.unsaved_vulnerability_ids = [check_checkID] + finding.unsaved_vulnerability_ids = [UniformTrivyVulnID().return_uniformed_vulnid(check_checkID)] findings.append(finding) return findings diff --git a/dojo/tools/trivy_operator/parser.py b/dojo/tools/trivy_operator/parser.py index 8be42e8e31..ba7cc730a1 100644 --- a/dojo/tools/trivy_operator/parser.py +++ b/dojo/tools/trivy_operator/parser.py @@ -20,7 +20,6 @@ def get_description_for_scan_types(self, scan_type): def get_findings(self, scan_file, test): scan_data = scan_file.read() - try: data = json.loads(str(scan_data, "utf-8")) except Exception: @@ -29,6 +28,11 @@ def get_findings(self, scan_file, test): if type(data) is list: for listitems in data: findings += self.output_findings(listitems, test) 
+ elif type(data) is dict and bool(set(data.keys()) & {"clustercompliancereports.aquasecurity.github.io", "clusterconfigauditreports.aquasecurity.github.io", "clusterinfraassessmentreports.aquasecurity.github.io", "clusterrbacassessmentreports.aquasecurity.github.io", "configauditreports.aquasecurity.github.io", "exposedsecretreports.aquasecurity.github.io", "infraassessmentreports.aquasecurity.github.io", "rbacassessmentreports.aquasecurity.github.io", "vulnerabilityreports.aquasecurity.github.io"}): + for datakey in list(data.keys()): + if datakey not in ["clustersbomreports.aquasecurity.github.io", "sbomreports.aquasecurity.github.io"]: + for listitems in (data[datakey]): + findings += self.output_findings(listitems, test) else: findings += self.output_findings(data, test) return findings diff --git a/dojo/tools/trivy_operator/secrets_handler.py b/dojo/tools/trivy_operator/secrets_handler.py index 5dcd7a7bfe..855229fbd4 100644 --- a/dojo/tools/trivy_operator/secrets_handler.py +++ b/dojo/tools/trivy_operator/secrets_handler.py @@ -42,6 +42,7 @@ def handle_secrets(self, labels, secrets, test): secret_description += "\n**resource.kind:** " + resource_kind secret_description += "\n**resource.name:** " + resource_name secret_description += "\n**resource.namespace:** " + resource_namespace + secret_description += "\n**ruleID:** " + secret_rule_id finding = Finding( test=test, title=title, @@ -54,7 +55,5 @@ def handle_secrets(self, labels, secrets, test): service=service, tags=[resource_namespace], ) - if secret_rule_id: - finding.unsaved_vulnerability_ids = [secret_rule_id] findings.append(finding) return findings diff --git a/dojo/tools/trivy_operator/uniform_vulnid.py b/dojo/tools/trivy_operator/uniform_vulnid.py new file mode 100644 index 0000000000..b3aae5055e --- /dev/null +++ b/dojo/tools/trivy_operator/uniform_vulnid.py @@ -0,0 +1,20 @@ +import re + + +class UniformTrivyVulnID: + def return_uniformed_vulnid(self, vulnid): + if vulnid is None: + return vulnid + if "cve" in vulnid.lower(): + return vulnid + if "khv" in vulnid.lower(): + temp = re.compile("([a-zA-Z-_]+)([0-9]+)") + number = str(temp.match(vulnid).groups()[1]).zfill(3) + avd_category = str(temp.match(vulnid.lower()).groups()[0]) + return avd_category.upper() + number + if "ksv" in vulnid.lower() or "kcv" in vulnid.lower(): + temp = re.compile("([a-zA-Z-_]+)([0-9]+)") + number = str(temp.match(vulnid).groups()[1]).zfill(4) + avd_category = str(temp.match(vulnid.lower().replace("_", "").replace("-", "")).groups()[0].replace("avd", "")) + return "AVD-" + avd_category.upper() + "-" + number + return vulnid diff --git a/dojo/tools/trivy_operator/vulnerability_handler.py b/dojo/tools/trivy_operator/vulnerability_handler.py index a5a26e1288..99faa009d1 100644 --- a/dojo/tools/trivy_operator/vulnerability_handler.py +++ b/dojo/tools/trivy_operator/vulnerability_handler.py @@ -1,4 +1,5 @@ from dojo.models import Finding +from dojo.tools.trivy_operator.uniform_vulnid import UniformTrivyVulnID DESCRIPTION_TEMPLATE = """{title} **Fixed version:** {fixed_version} @@ -85,6 +86,6 @@ def handle_vulns(self, labels, vulnerabilities, test): tags=finding_tags, ) if vuln_id: - finding.unsaved_vulnerability_ids = [vuln_id] + finding.unsaved_vulnerability_ids = [UniformTrivyVulnID().return_uniformed_vulnid(vuln_id)] findings.append(finding) return findings diff --git a/dojo/utils.py b/dojo/utils.py index d30ef1ea63..c19da8945b 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -1381,7 +1381,8 @@ def get_page_items_and_count(request, items, 
page_size, prefix="", do_count=True def handle_uploaded_threat(f, eng): - _name, extension = os.path.splitext(f.name) + path = Path(f.name) + extension = path.suffix # Check if threat folder exist. if not Path(settings.MEDIA_ROOT + "/threat/").is_dir(): # Create the folder @@ -1395,7 +1396,8 @@ def handle_uploaded_threat(f, eng): def handle_uploaded_selenium(f, cred): - _name, extension = os.path.splitext(f.name) + path = Path(f.name) + extension = path.suffix with open(settings.MEDIA_ROOT + f"/selenium/{cred.id}{extension}", "wb+") as destination: for chunk in f.chunks(): @@ -2699,7 +2701,9 @@ def generate_file_response_from_file_path( ) -> FileResponse: """Serve an local file in a uniformed way.""" # Determine the file path - file_path_without_extension, file_extension = os.path.splitext(file_path) + path = Path(file_path) + file_path_without_extension = path.parent / path.stem + file_extension = path.suffix # Determine the file name if not supplied if file_name is None: file_name = file_path_without_extension.rsplit("/")[-1] diff --git a/helm/defectdojo/.helmignore b/helm/defectdojo/.helmignore index 50af031725..70909a86d6 100644 --- a/helm/defectdojo/.helmignore +++ b/helm/defectdojo/.helmignore @@ -20,3 +20,4 @@ .idea/ *.tmproj .vscode/ +README.md diff --git a/helm/defectdojo/Chart.lock b/helm/defectdojo/Chart.lock index 611d1100a4..7a0e49b95d 100644 --- a/helm/defectdojo/Chart.lock +++ b/helm/defectdojo/Chart.lock @@ -1,12 +1,12 @@ dependencies: - name: postgresql repository: https://charts.bitnami.com/bitnami - version: 16.1.0 + version: 16.2.0 - name: postgresql-ha repository: https://charts.bitnami.com/bitnami version: 9.4.11 - name: redis repository: https://charts.bitnami.com/bitnami version: 19.6.4 -digest: sha256:499d18e7070e7752e0dccfa2187d755570e105eb21cae37d6f0623a333997db8 -generated: "2024-10-30T17:58:45.866148081Z" +digest: sha256:0d2e729a1b07543cb813f80f5d05c67ad56817f1b44911e08245e43868f49301 +generated: "2024-11-14T10:51:48.400717864Z" diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 9d7a99e360..de861e9549 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 appVersion: "2.41.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.160-dev +version: 1.6.161-dev icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap @@ -10,7 +10,7 @@ maintainers: url: https://github.com/DefectDojo/django-DefectDojo dependencies: - name: postgresql - version: ~16.1.0 + version: ~16.2.0 repository: "https://charts.bitnami.com/bitnami" condition: postgresql.enabled - name: postgresql-ha diff --git a/helm/defectdojo/README.md b/helm/defectdojo/README.md new file mode 120000 index 0000000000..5c0dd98ed0 --- /dev/null +++ b/helm/defectdojo/README.md @@ -0,0 +1 @@ +../../readme-docs/KUBERNETES.md \ No newline at end of file diff --git a/helm/defectdojo/templates/celery-beat-deployment.yaml b/helm/defectdojo/templates/celery-beat-deployment.yaml index 7eda46b868..973c2c857a 100644 --- a/helm/defectdojo/templates/celery-beat-deployment.yaml +++ b/helm/defectdojo/templates/celery-beat-deployment.yaml @@ -66,17 +66,14 @@ spec: path: {{ .hostPath }} {{- end }} {{- end }} - {{- if .Values.dbMigrationChecker.enabled }} + {{- if or .Values.dbMigrationChecker.enabled .Values.cloudsql.enabled }} initContainers: - {{$data := dict "fullName" $fullName }} - {{- $newContext := merge . 
(dict "fullName" $fullName) }} - {{- include "dbMigrationChecker" $newContext | nindent 6 }} {{- end }} - containers: {{- if .Values.cloudsql.enabled }} - name: cloudsql-proxy image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} + restartPolicy: Always securityContext: runAsNonRoot: true command: ["/cloud_sql_proxy"] @@ -92,6 +89,12 @@ spec: - "-ip_address_types=PRIVATE" {{- end }} {{- end }} + {{- if .Values.dbMigrationChecker.enabled }} + {{$data := dict "fullName" $fullName }} + {{- $newContext := merge . (dict "fullName" $fullName) }} + {{- include "dbMigrationChecker" $newContext | nindent 6 }} + {{- end }} + containers: - command: - /entrypoint-celery-beat.sh name: celery diff --git a/helm/defectdojo/templates/celery-worker-deployment.yaml b/helm/defectdojo/templates/celery-worker-deployment.yaml index f21be7a13b..4ac4ddce89 100644 --- a/helm/defectdojo/templates/celery-worker-deployment.yaml +++ b/helm/defectdojo/templates/celery-worker-deployment.yaml @@ -64,17 +64,14 @@ spec: path: {{ .hostPath }} {{- end }} {{- end }} - {{- if .Values.dbMigrationChecker.enabled }} + {{- if or .Values.dbMigrationChecker.enabled .Values.cloudsql.enabled }} initContainers: - {{$data := dict "fullName" $fullName }} - {{- $newContext := merge . (dict "fullName" $fullName) }} - {{- include "dbMigrationChecker" $newContext | nindent 6 }} {{- end }} - containers: {{- if .Values.cloudsql.enabled }} - name: cloudsql-proxy image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} + restartPolicy: Always securityContext: runAsNonRoot: true command: ["/cloud_sql_proxy"] @@ -90,6 +87,12 @@ spec: - "-ip_address_types=PRIVATE" {{- end }} {{- end }} + {{- if .Values.dbMigrationChecker.enabled }} + {{$data := dict "fullName" $fullName }} + {{- $newContext := merge . (dict "fullName" $fullName) }} + {{- include "dbMigrationChecker" $newContext | nindent 6 }} + {{- end }} + containers: - name: celery image: "{{ template "celery.repository" . }}:{{ .Values.tag }}" imagePullPolicy: {{ .Values.imagePullPolicy }} diff --git a/helm/defectdojo/templates/django-deployment.yaml b/helm/defectdojo/templates/django-deployment.yaml index ce126f1cc2..d8610810fb 100644 --- a/helm/defectdojo/templates/django-deployment.yaml +++ b/helm/defectdojo/templates/django-deployment.yaml @@ -82,17 +82,14 @@ spec: emptyDir: {} {{- end }} {{- end }} - {{- if .Values.dbMigrationChecker.enabled }} + {{- if or .Values.dbMigrationChecker.enabled .Values.cloudsql.enabled }} initContainers: - {{$data := dict "fullName" $fullName }} - {{- $newContext := merge . (dict "fullName" $fullName) }} - {{- include "dbMigrationChecker" $newContext | nindent 6 }} {{- end }} - containers: {{- if .Values.cloudsql.enabled }} - name: cloudsql-proxy image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} + restartPolicy: Always securityContext: runAsNonRoot: true command: ["/cloud_sql_proxy"] @@ -108,6 +105,12 @@ spec: - "-ip_address_types=PRIVATE" {{- end }} {{- end }} + {{- if .Values.dbMigrationChecker.enabled }} + {{$data := dict "fullName" $fullName }} + {{- $newContext := merge . 
(dict "fullName" $fullName) }} + {{- include "dbMigrationChecker" $newContext | nindent 6 }} + {{- end }} + containers: {{- if and .Values.monitoring.enabled .Values.monitoring.prometheus.enabled }} - name: metrics image: {{ .Values.monitoring.prometheus.image }} diff --git a/helm/defectdojo/templates/initializer-job.yaml b/helm/defectdojo/templates/initializer-job.yaml index 7ed4abbde5..7018c515ce 100644 --- a/helm/defectdojo/templates/initializer-job.yaml +++ b/helm/defectdojo/templates/initializer-job.yaml @@ -15,7 +15,9 @@ metadata: {{- toYaml . | nindent 4 }} {{- end }} spec: + {{- if and (int .Values.initializer.keepSeconds) (gt (int .Values.initializer.keepSeconds) 0) }} ttlSecondsAfterFinished: {{ .Values.initializer.keepSeconds }} + {{- end }} template: metadata: labels: @@ -49,6 +51,26 @@ spec: {{- end }} {{- end }} initContainers: + {{- if .Values.cloudsql.enabled }} + - name: cloudsql-proxy + image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} + imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} + restartPolicy: Always + securityContext: + runAsNonRoot: true + command: ["/cloud_sql_proxy"] + args: + - "-verbose={{ .Values.cloudsql.verbose }}" + {{- if eq .Values.database "postgresql" }} + - "-instances={{ .Values.cloudsql.instance }}=tcp:{{ .Values.postgresql.primary.service.ports.postgresql }}" + {{- end }} + {{- if .Values.cloudsql.enable_iam_login }} + - "-enable_iam_login" + {{- end }} + {{- if .Values.cloudsql.use_private_ip }} + - "-ip_address_types=PRIVATE" + {{- end }} + {{- end }} - name: wait-for-db command: - '/bin/bash' @@ -71,25 +93,6 @@ spec: {{- toYaml . | nindent 8 }} {{- end }} containers: - {{- if .Values.cloudsql.enabled }} - - name: cloudsql-proxy - image: {{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }} - imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy }} - securityContext: - runAsNonRoot: true - command: ["/cloud_sql_proxy"] - args: - - "-verbose={{ .Values.cloudsql.verbose }}" - {{- if eq .Values.database "postgresql" }} - - "-instances={{ .Values.cloudsql.instance }}=tcp:{{ .Values.postgresql.primary.service.ports.postgresql }}" - {{- end }} - {{- if .Values.cloudsql.enable_iam_login }} - - "-enable_iam_login" - {{- end }} - {{- if .Values.cloudsql.use_private_ip }} - - "-ip_address_types=PRIVATE" - {{- end }} - {{- end }} - name: initializer image: "{{ template "initializer.repository" . }}:{{ .Values.tag }}" imagePullPolicy: {{ .Values.imagePullPolicy }} diff --git a/helm/defectdojo/values.yaml b/helm/defectdojo/values.yaml index f480810d43..555d66c475 100644 --- a/helm/defectdojo/values.yaml +++ b/helm/defectdojo/values.yaml @@ -321,7 +321,7 @@ initializer: jobAnnotations: {} annotations: {} labels: {} - keepSeconds: 60 + keepSeconds: 60 # A positive integer will keep this Job and Pod deployed for the specified number of seconds, after which they will be removed. For all other values, the Job and Pod will remain deployed. affinity: {} nodeSelector: {} resources: diff --git a/readme-docs/KUBERNETES.md b/readme-docs/KUBERNETES.md index c0edf4cb29..9c25b753a9 100644 --- a/readme-docs/KUBERNETES.md +++ b/readme-docs/KUBERNETES.md @@ -20,8 +20,7 @@ Starting with version 1.14.0, a helm chart will be pushed onto the `helm-charts` To use it, you can add our repo. 
``` -$ helm repo add helm-charts 'https://raw.githubusercontent.com/DefectDojo/django-DefectDojo/helm-charts' -"helm-charts" has been added to your repositories +$ helm repo add defectdojo 'https://raw.githubusercontent.com/DefectDojo/django-DefectDojo/helm-charts' $ helm repo update ``` You should now be able to see the chart. ``` $ helm search repo defectdojo NAME CHART VERSION APP VERSION DESCRIPTION -helm-charts/defectdojo 1.5.1 1.14.0-dev A Helm chart for Kubernetes to install DefectDojo +defectdojo/defectdojo 1.6.153 2.39.0 A Helm chart for Kubernetes to install DefectDojo ``` ## Kubernetes Local Quickstart @@ -268,6 +267,15 @@ For more detail how to create proper PVC see [example](https://github.com/De ### Installation +**Important:** If you choose to create the secret on your own, you will need to create a secret named `defectdojo` containing the following fields: + +- DD_ADMIN_PASSWORD +- DD_SECRET_KEY +- DD_CREDENTIAL_AES_256_KEY +- METRICS_HTTP_AUTH_PASSWORD + +These fields are required to get the stack running. + ```zsh # Install Helm chart. Choose a host name that matches the certificate above helm install \ @@ -322,7 +330,7 @@ helm install \ # Run test. helm test defectdojo -# Navigate to . +# Navigate to . ``` ### Prometheus metrics @@ -349,7 +357,7 @@ In your helm install simply pass them as a defined array, for example: This will also work with shell inserted variables: -` --set "alternativeHosts={defectdojo.${TLS_CERT_DOMAIN},localhost}"` +`--set "alternativeHosts={defectdojo.${TLS_CERT_DOMAIN},localhost}"` You will still need to set a host value as well. @@ -375,18 +383,19 @@ extraEnv: value: '26379' ``` - ### How to use an external PostgreSQL DB with Defectdojo #### Step 1: Create a Namespace for DefectDojo To begin, create a dedicated namespace for DefectDojo to isolate its resources: `kubectl create ns defectdojo` + #### Step 2: Create a Secret for PostgreSQL Credentials Set up a Kubernetes Secret to securely store the PostgreSQL user password and database connection URL, which are essential for establishing a secure connection between DefectDojo and your PostgreSQL instance. Apply the secret using the following command: `kubectl apply -f secret.yaml -n defectdojo`. This secret will be referenced within the `extraEnv` section of the DefectDojo Helm values file. Sample secret template (replace the placeholders with your PostgreSQL credentials): + ```YAML apiversion: v1 kind: Secret @@ -394,8 +403,7 @@ metadata: name: defectdojo-postgresql-specific type: Opaque stringData: # I chose stringData for better visualization of the credentials for debugging - postgresql-password: - db-url: psql://:@:/ + password: ``` #### Step 2.5: Install PostgreSQL (Optional) @@ -408,10 +416,9 @@ helm repo update helm install defectdojo-postgresql bitnami/postgresql -n defectdojo -f postgresql/values.yaml ``` - Sample `values.yaml` file for PostgreSQL configuration: -```YAML +```YAML auth:   username: defectdojo   password: @@ -427,18 +434,27 @@ auth: Before installing the DefectDojo Helm chart, it's important to customize the `values.yaml` file. 
Key areas to modify include specifying the PostgreSQL connection details & the extraEnv block: ```yaml -database: postgresql +database: postgresql # refer to the following configuration + postgresql: -  postgresServer: "defectdojo-postgresql" # point to the hostname of your postgresql server -  enabled: false + enabled: false # Disable the creation of the database in the cluster + postgresServer: "127.0.0.1" # Required to skip certain tests not useful on external instances + auth: + username: defectdojo # your database user + database: defectdojo # your database name + secretKeys: + adminPasswordKey: password # the name of the field containing the password value + userPasswordKey: password # the name of the field containing the password value + replicationPasswordKey: password # the name of the field containing the password value + existingSecret: defectdojo-postgresql-specific # the secret containing your database password -# Specify the postgresql DB connection url for the external postgresql server extraEnv: -  - name: DD_DATABASE_URL -    valueFrom: -      secretKeyRef: -        name: defectdojo-postgresql-specific -        key: db-url +# Overwrite the database endpoint +- name: DD_DATABASE_HOST + value: +# Overwrite the database port +- name: DD_DATABASE_PORT + value: ``` #### Step 4: Deploy DefectDojo diff --git a/requirements.txt b/requirements.txt index 255a22bb7c..5c57c0bbda 100644 --- a/requirements.txt +++ b/requirements.txt @@ -57,7 +57,7 @@ django-debug-toolbar-request-history==0.1.4 vcrpy==6.0.2 vcrpy-unittest==0.1.7 django-tagulous==2.1.0 -PyJWT==2.9.0 +PyJWT==2.10.0 cvss==3.3 django-fieldsignals==0.7.0 hyperlink==21.0.0 @@ -69,7 +69,7 @@ django-ratelimit==4.1.0 argon2-cffi==23.1.0 blackduck==1.1.3 pycurl==7.45.3 # Required for Celery Broker AWS (SQS) support -boto3==1.35.58 # Required for Celery Broker AWS (SQS) support +boto3==1.35.64 # Required for Celery Broker AWS (SQS) support netaddr==1.3.0 vulners==2.2.3 fontawesomefree==6.6.0 diff --git a/ruff.toml b/ruff.toml index c78256a9d2..cface3f73f 100644 --- a/ruff.toml +++ b/ruff.toml @@ -65,7 +65,7 @@ select = [ "TCH", "INT", "ARG003", "ARG004", "ARG005", - "PTH2", "PTH101", "PTH102", "PTH103", "PTH104", "PTH105", "PTH106", "PTH107", "PTH108", "PTH109", "PTH110", "PTH111", "PTH112", "PTH113", "PTH114", "PTH115", "PTH116", "PTH117", "PTH119", "PTH120", "PTH121", "PTH124", + "PTH2", "PTH101", "PTH102", "PTH103", "PTH104", "PTH105", "PTH106", "PTH107", "PTH108", "PTH109", "PTH110", "PTH111", "PTH112", "PTH113", "PTH114", "PTH115", "PTH116", "PTH117", "PTH119", "PTH120", "PTH121", "PTH122", "PTH124", "TD001", "TD004", "TD005", "PD", "PGH", diff --git a/unittests/scans/aws_prowler_v3plus/many_vuln_after_4_5_0.ocsf.json b/unittests/scans/aws_prowler_v3plus/many_vuln_after_4_5_0.ocsf.json new file mode 100644 index 0000000000..36c0219e39 --- /dev/null +++ b/unittests/scans/aws_prowler_v3plus/many_vuln_after_4_5_0.ocsf.json @@ -0,0 +1,247 @@ +[{ + "metadata": { + "event_code": "iam_role_administratoraccess_policy_permissive_trust_relationship", + "product": { + "name": "Prowler", + "vendor_name": "Prowler", + "version": "4.2.1" + }, + "version": "1.2.0" + }, + "severity_id": 4, + "severity": "High", + "status": "New", + "status_code": "FAIL", + "status_detail": "IAM Role myAdministratorExecutionRole has AdministratorAccess policy attached that has too permissive trust relationship.", + "status_id": 3, + "unmapped": { + "check_type": "", + "related_url": 
"https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_job-functions.html#jf_administrator", + "categories": "trustboundaries", + "depends_on": "", + "related_to": "", + "notes": "CAF Security Epic: IAM", + "compliance": {} + }, + "activity_name": "Create", + "activity_id": 1, + "finding_info": { + "created_time": "2024-06-03T14:15:19.382075", + "desc": "Ensure IAM Roles with attached AdministratorAccess policy have a well defined trust relationship", + "product_uid": "prowler", + "title": "Ensure IAM Roles with attached AdministratorAccess policy have a well defined trust relationship", + "uid": "prowler-aws-iam_role_administratoraccess_policy_permissive_trust_relationship-123456789012-us-east-1-myAdministratorExecutionRole" + }, + "resources": [ + { + "cloud_partition": "aws", + "region": "us-east-1", + "data": { + "details": "" + }, + "group": { + "name": "iam" + }, + "labels": [], + "name": "myAdministratorExecutionRole", + "type": "AwsIamRole", + "uid": "arn:aws:iam::123456789012:role/myAdministratorExecutionRole" + } + ], + "category_name": "Findings", + "category_uid": 2, + "class_name": "DetectionFinding", + "class_uid": 2004, + "cloud": { + "account": { + "name": "", + "type": "AWS_Account", + "type_id": 10, + "uid": "123456789012", + "labels": [] + }, + "org": { + "name": "", + "uid": "" + }, + "provider": "aws", + "region": "us-east-1" + }, + "time_dt": "2024-06-03T14:15:19.382075", + "remediation": { + "desc": "Apply the principle of least privilege. Instead of AdministratorAccess, assign only the permissions necessary for specific roles and tasks. Create custom IAM policies with minimal permissions based on the principle of least privilege. If a role really needs AdministratorAccess, the trust relationship must be well defined to restrict it usage only to the Principal, Action, Audience and Subject intended for it.", + "references": [ + "https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege" + ] + }, + "risk_details": "The AWS-managed AdministratorAccess policy grants all actions for all AWS services and for all resources in the account and as such exposes the customer to a significant data leakage threat. 
It is therefore particularly important that the trust relationship is well defined to restrict it usage only to the Principal, Action, Audience and Subject intended for it.", + "type_uid": 200401, + "type_name": "Create" +},{ + "metadata": { + "event_code": "iam_role_cross_account_readonlyaccess_policy", + "product": { + "name": "Prowler", + "vendor_name": "Prowler", + "version": "4.2.1" + }, + "version": "1.2.0" + }, + "severity_id": 4, + "severity": "High", + "status": "New", + "status_code": "FAIL", + "status_detail": "IAM Role AuditRole gives cross account read-only access.", + "status_id": 3, + "unmapped": { + "check_type": "", + "related_url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_job-functions.html#awsmp_readonlyaccess", + "categories": "trustboundaries", + "depends_on": "", + "related_to": "", + "notes": "CAF Security Epic: IAM", + "compliance": { + "MITRE-ATTACK": [ + "T1078" + ], + "AWS-Foundational-Technical-Review": [ + "IAM-0012" + ] + } + }, + "activity_name": "Create", + "activity_id": 1, + "finding_info": { + "created_time": "2024-06-03T14:15:19.382075", + "desc": "Ensure IAM Roles do not have ReadOnlyAccess access for external AWS accounts", + "product_uid": "prowler", + "title": "Ensure IAM Roles do not have ReadOnlyAccess access for external AWS accounts", + "uid": "prowler-aws-iam_role_cross_account_readonlyaccess_policy-123456789012-us-east-1-AuditRole" + }, + "resources": [ + { + "cloud_partition": "aws", + "region": "us-east-1", + "data": { + "details": "" + }, + "group": { + "name": "iam" + }, + "labels": [ + "some-label=some value" + ], + "name": "AuditRole", + "type": "AwsIamRole", + "uid": "arn:aws:iam::123456789012:role/AuditRole" + } + ], + "category_name": "Findings", + "category_uid": 2, + "class_name": "DetectionFinding", + "class_uid": 2004, + "cloud": { + "account": { + "name": "", + "type": "AWS_Account", + "type_id": 10, + "uid": "123456789012", + "labels": [] + }, + "org": { + "name": "", + "uid": "" + }, + "provider": "aws", + "region": "us-east-1" + }, + "time_dt": "2024-06-03T14:15:19.382075", + "remediation": { + "desc": "Remove the AWS-managed ReadOnlyAccess policy from all roles that have a trust policy, including third-party cloud accounts, or remove third-party cloud accounts from the trust policy of all roles that need the ReadOnlyAccess policy.", + "references": [ + "https://docs.securestate.vmware.com/rule-docs/aws-iam-role-cross-account-readonlyaccess-policy" + ] + }, + "risk_details": "The AWS-managed ReadOnlyAccess policy is highly potent and exposes the customer to a significant data leakage threat. It should be granted very conservatively. 
For granting access to 3rd party vendors, consider using alternative managed policies, such as ViewOnlyAccess or SecurityAudit.", + "type_uid": 200401, + "type_name": "Create" +},{ + "metadata": { + "event_code": "iam_role_permissive_trust_relationship", + "product": { + "name": "Prowler", + "vendor_name": "Prowler", + "version": "4.2.1" + }, + "version": "1.2.0" + }, + "severity_id": 4, + "severity": "High", + "status": "Suppressed", + "status_code": "FAIL", + "status_detail": "IAM Role CrossAccountResourceAccessRole has permissive trust relationship to other accounts", + "status_id": 3, + "unmapped": { + "check_type": "", + "related_url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html#principal-accounts", + "categories": "trustboundaries", + "depends_on": "", + "related_to": "", + "notes": "CAF Security Epic: IAM", + "compliance": {} + }, + "activity_name": "Create", + "activity_id": 1, + "finding_info": { + "created_time": "2024-06-03T14:15:19.382075", + "desc": "Ensure IAM Roles do not allow assume role from any role of a cross account", + "product_uid": "prowler", + "title": "Ensure IAM Roles do not allow assume role from any role of a cross account", + "uid": "prowler-aws-iam_role_permissive_trust_relationship-123456789012-us-east-1-CrossAccountResourceAccessRole" + }, + "resources": [ + { + "cloud_partition": "aws", + "region": "us-east-1", + "data": { + "details": "" + }, + "group": { + "name": "iam" + }, + "labels": [], + "name": "CrossAccountResourceAccessRole", + "type": "AwsIamRole", + "uid": "arn:aws:iam::123456789012:role/CrossAccountResourceAccessRole" + } + ], + "category_name": "Findings", + "category_uid": 2, + "class_name": "DetectionFinding", + "class_uid": 2004, + "cloud": { + "account": { + "name": "", + "type": "AWS_Account", + "type_id": 10, + "uid": "123456789012", + "labels": [] + }, + "org": { + "name": "", + "uid": "" + }, + "provider": "aws", + "region": "us-east-1" + }, + "time_dt": "2024-06-03T14:15:19.382075", + "remediation": { + "desc": "Ensure IAM Roles do not allow assume role from any role of a cross account but only from specific roles of specific accounts.", + "references": [ + "https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html#principal-roles" + ] + }, + "risk_details": "If an IAM role allows assume role from any role of a cross account, it can lead to privilege escalation.", + "type_uid": 200401, + "type_name": "Create" +}] \ No newline at end of file diff --git a/unittests/scans/aws_prowler_v3plus/one_vuln_after_4_5_0.ocsf.json b/unittests/scans/aws_prowler_v3plus/one_vuln_after_4_5_0.ocsf.json new file mode 100644 index 0000000000..e8e79e2f71 --- /dev/null +++ b/unittests/scans/aws_prowler_v3plus/one_vuln_after_4_5_0.ocsf.json @@ -0,0 +1,80 @@ +[{ + "metadata": { + "event_code": "iam_role_administratoraccess_policy_permissive_trust_relationship", + "product": { + "name": "Prowler", + "vendor_name": "Prowler", + "version": "4.2.1" + }, + "version": "1.2.0" + }, + "severity_id": 4, + "severity": "High", + "status": "New", + "status_code": "FAIL", + "status_detail": "IAM Role myAdministratorExecutionRole has AdministratorAccess policy attached that has too permissive trust relationship.", + "status_id": 3, + "unmapped": { + "check_type": "", + "related_url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_job-functions.html#jf_administrator", + "categories": "trustboundaries", + "depends_on": "", + "related_to": "", + "notes": "CAF Security Epic: 
IAM", + "compliance": {} + }, + "activity_name": "Create", + "activity_id": 1, + "finding_info": { + "created_time": "2024-06-03T14:15:19.382075", + "desc": "Ensure IAM Roles with attached AdministratorAccess policy have a well defined trust relationship", + "product_uid": "prowler", + "title": "Ensure IAM Roles with attached AdministratorAccess policy have a well defined trust relationship", + "uid": "prowler-aws-iam_role_administratoraccess_policy_permissive_trust_relationship-123456789012-us-east-1-myAdministratorExecutionRole" + }, + "resources": [ + { + "cloud_partition": "aws", + "region": "us-east-1", + "data": { + "details": "" + }, + "group": { + "name": "iam" + }, + "labels": [], + "name": "myAdministratorExecutionRole", + "type": "AwsIamRole", + "uid": "arn:aws:iam::123456789012:role/myAdministratorExecutionRole" + } + ], + "category_name": "Findings", + "category_uid": 2, + "class_name": "DetectionFinding", + "class_uid": 2004, + "cloud": { + "account": { + "name": "", + "type": "AWS_Account", + "type_id": 10, + "uid": "123456789012", + "labels": [] + }, + "org": { + "name": "", + "uid": "" + }, + "provider": "aws", + "region": "us-east-1" + }, + "time_dt": "2024-06-03T14:15:19.382075", + "remediation": { + "desc": "Apply the principle of least privilege. Instead of AdministratorAccess, assign only the permissions necessary for specific roles and tasks. Create custom IAM policies with minimal permissions based on the principle of least privilege. If a role really needs AdministratorAccess, the trust relationship must be well defined to restrict it usage only to the Principal, Action, Audience and Subject intended for it.", + "references": [ + "https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege" + ] + }, + "risk_details": "The AWS-managed AdministratorAccess policy grants all actions for all AWS services and for all resources in the account and as such exposes the customer to a significant data leakage threat. 
It is therefore particularly important that the trust relationship is well defined to restrict it usage only to the Principal, Action, Audience and Subject intended for it.", + "type_uid": 200401, + "type_name": "Create" +}] \ No newline at end of file diff --git a/unittests/scans/trivy_operator/all_reports_in_dict.json b/unittests/scans/trivy_operator/all_reports_in_dict.json new file mode 100644 index 0000000000..1ac8c678c3 --- /dev/null +++ b/unittests/scans/trivy_operator/all_reports_in_dict.json @@ -0,0 +1,674 @@ +{ + "clustercompliancereports.aquasecurity.github.io": [], + "clusterconfigauditreports.aquasecurity.github.io": [], + "clusterinfraassessmentreports.aquasecurity.github.io": [], + "clusterrbacassessmentreports.aquasecurity.github.io": [], + "clustersbomreports.aquasecurity.github.io": [], + "configauditreports.aquasecurity.github.io": [ + { + "apiVersion": "aquasecurity.github.io/v1alpha1", + "kind": "ConfigAuditReport", + "metadata": { + "annotations": { + "trivy-operator.aquasecurity.github.io/report-ttl": "24h0m0s" + }, + "creationTimestamp": "2023-03-23T16:22:54Z", + "generation": 1, + "labels": { + "plugin-config-hash": "659b7b9c46", + "resource-spec-hash": "fc85b485f", + "trivy-operator.resource.kind": "ReplicaSet", + "trivy-operator.resource.name": "nginx-deployment-965685897", + "trivy-operator.resource.namespace": "default" + }, + "name": "replicaset-nginx-deployment-965685897", + "namespace": "default", + "ownerReferences": [ + { + "apiVersion": "apps/v1", + "blockOwnerDeletion": false, + "controller": true, + "kind": "ReplicaSet", + "name": "nginx-deployment-965685897", + "uid": "d19c7f74-b4c3-429d-9a45-1b2f5efc3c88" + } + ], + "resourceVersion": "1268", + "uid": "a92e0951-e988-419d-8602-6852f920ce06" + }, + "report": { + "checks": [ + { + "category": "Kubernetes Security Check", + "checkID": "KSV014", + "description": "An immutable root file system prevents applications from writing to their local disk. 
This can limit intrusions, as attackers will not be able to tamper with the file system or write foreign executables to disk.", + "messages": [ + "Container 'nginx' of ReplicaSet 'nginx-deployment-965685897' should set 'securityContext.readOnlyRootFilesystem' to true" + ], + "severity": "LOW", + "success": false, + "title": "Root file system is not read-only" + } + ], + "scanner": { + "name": "Trivy", + "vendor": "Aqua Security", + "version": "dev" + }, + "summary": { + "criticalCount": 0, + "highCount": 0, + "lowCount": 1, + "mediumCount": 0 + }, + "updateTimestamp": "2023-03-23T16:22:54Z" + } + } + ], + "exposedsecretreports.aquasecurity.github.io": [ + { + "apiVersion": "aquasecurity.github.io/v1alpha1", + "kind": "ExposedSecretReport", + "metadata": { + "annotations": { + "trivy-operator.aquasecurity.github.io/report-ttl": "24h0m0s" + }, + "creationTimestamp": "2023-03-24T08:34:18Z", + "generation": 1, + "labels": { + "resource-spec-hash": "69497b548d", + "trivy-operator.container.name": "nginx", + "trivy-operator.resource.kind": "ReplicaSet", + "trivy-operator.resource.name": "nginx-deployment-c868466d4", + "trivy-operator.resource.namespace": "default" + }, + "name": "replicaset-nginx-deployment-c868466d4-nginx", + "namespace": "default", + "ownerReferences": [ + { + "apiVersion": "apps/v1", + "blockOwnerDeletion": false, + "controller": true, + "kind": "ReplicaSet", + "name": "nginx-deployment-c868466d4", + "uid": "1f07d5aa-f272-4f3d-ada8-a3f80ab8088d" + } + ], + "resourceVersion": "1798", + "uid": "46490b45-f440-446c-9574-efcd3afe3a92" + }, + "report": { + "artifact": { + "repository": "library/secret", + "tag": "latest" + }, + "registry": { + "server": "index.docker.io" + }, + "scanner": { + "name": "Trivy", + "vendor": "Aqua Security", + "version": "0.38.2" + }, + "secrets": [ + { + "category": "AWS", + "match": "AWS_secret_KEY=\"****************************************\"", + "ruleID": "aws-secret-access-key", + "severity": "CRITICAL", + "target": "root/aws_secret.txt", + "title": "AWS Secret Access Key" + } + ], + "summary": { + "criticalCount": 1, + "highCount": 0, + "lowCount": 0, + "mediumCount": 0 + }, + "updateTimestamp": "2023-03-24T08:34:18Z" + } + } + ], + "infraassessmentreports.aquasecurity.github.io": [], + "rbacassessmentreports.aquasecurity.github.io": [], + "sbomreports.aquasecurity.github.io": [], + "vulnerabilityreports.aquasecurity.github.io": [ + { + "apiVersion": "aquasecurity.github.io/v1alpha1", + "kind": "VulnerabilityReport", + "metadata": { + "annotations": { + "trivy-operator.aquasecurity.github.io/report-ttl": "24h0m0s" + }, + "creationTimestamp": "2023-03-23T16:23:01Z", + "generation": 1, + "labels": { + "resource-spec-hash": "fc85b485f", + "trivy-operator.container.name": "nginx", + "trivy-operator.resource.kind": "ReplicaSet", + "trivy-operator.resource.name": "nginx-deployment-965685897", + "trivy-operator.resource.namespace": "default" + }, + "name": "replicaset-nginx-deployment-965685897-nginx", + "namespace": "default", + "ownerReferences": [ + { + "apiVersion": "apps/v1", + "blockOwnerDeletion": false, + "controller": true, + "kind": "ReplicaSet", + "name": "nginx-deployment-965685897", + "uid": "d19c7f74-b4c3-429d-9a45-1b2f5efc3c88" + } + ], + "resourceVersion": "1293", + "uid": "b21503db-3075-4bb5-a259-aa33dbbaef4c" + }, + "report": { + "artifact": { + "repository": "library/nginx", + "tag": "alpine" + }, + "registry": { + "server": "index.docker.io" + }, + "scanner": { + "name": "Trivy", + "vendor": "Aqua Security", + "version": "0.38.2" + }, 
+ "summary": { + "criticalCount": 2, + "highCount": 3, + "lowCount": 11, + "mediumCount": 25, + "noneCount": 0, + "unknownCount": 0 + }, + "updateTimestamp": "2023-03-23T16:23:01Z", + "vulnerabilities": [ + { + "fixedVersion": "7.87.0-r2", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-23914", + "resource": "curl", + "score": 4.2, + "severity": "CRITICAL", + "target": "", + "title": "curl: HSTS ignored on multiple requests", + "vulnerabilityID": "CVE-2023-23914" + }, + { + "fixedVersion": "7.87.0-r2", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-23916", + "resource": "curl", + "score": 6.5, + "severity": "HIGH", + "target": "", + "title": "curl: HTTP multi-header compression denial of service", + "vulnerabilityID": "CVE-2023-23916" + }, + { + "fixedVersion": "7.87.0-r2", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-23915", + "resource": "curl", + "score": 4.2, + "severity": "MEDIUM", + "target": "", + "title": "curl: HSTS amnesia with --parallel", + "vulnerabilityID": "CVE-2023-23915" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27535", + "resource": "curl", + "severity": "MEDIUM", + "target": "", + "title": "curl: FTP too eager connection reuse", + "vulnerabilityID": "CVE-2023-27535" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27533", + "resource": "curl", + "severity": "LOW", + "target": "", + "title": "curl: TELNET option IAC injection", + "vulnerabilityID": "CVE-2023-27533" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27534", + "resource": "curl", + "severity": "LOW", + "target": "", + "title": "curl: SFTP path ~ resolving discrepancy", + "vulnerabilityID": "CVE-2023-27534" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27536", + "resource": "curl", + "severity": "LOW", + "target": "", + "title": "curl: GSS delegation too eager connection re-use", + "vulnerabilityID": "CVE-2023-27536" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27537", + "resource": "curl", + "severity": "LOW", + "target": "", + "title": "curl: HSTS double-free", + "vulnerabilityID": "CVE-2023-27537" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27538", + "resource": "curl", + "severity": "LOW", + "target": "", + "title": "curl: SSH connection too eager reuse still", + "vulnerabilityID": "CVE-2023-27538" + }, + { + "fixedVersion": "3.0.8-r1", + "installedVersion": "3.0.8-r0", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0464", + "resource": "libcrypto3", + "severity": "MEDIUM", + "target": "", + "title": "A security vulnerability has been identified in all supported versions ...", + "vulnerabilityID": "CVE-2023-0464" + }, + { + "fixedVersion": "7.87.0-r2", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-23914", + "resource": "libcurl", + "score": 4.2, + "severity": "CRITICAL", 
+ "target": "", + "title": "curl: HSTS ignored on multiple requests", + "vulnerabilityID": "CVE-2023-23914" + }, + { + "fixedVersion": "7.87.0-r2", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-23916", + "resource": "libcurl", + "score": 6.5, + "severity": "HIGH", + "target": "", + "title": "curl: HTTP multi-header compression denial of service", + "vulnerabilityID": "CVE-2023-23916" + }, + { + "fixedVersion": "7.87.0-r2", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-23915", + "resource": "libcurl", + "score": 4.2, + "severity": "MEDIUM", + "target": "", + "title": "curl: HSTS amnesia with --parallel", + "vulnerabilityID": "CVE-2023-23915" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27535", + "resource": "libcurl", + "severity": "MEDIUM", + "target": "", + "title": "curl: FTP too eager connection reuse", + "vulnerabilityID": "CVE-2023-27535" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27533", + "resource": "libcurl", + "severity": "LOW", + "target": "", + "title": "curl: TELNET option IAC injection", + "vulnerabilityID": "CVE-2023-27533" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27534", + "resource": "libcurl", + "severity": "LOW", + "target": "", + "title": "curl: SFTP path ~ resolving discrepancy", + "vulnerabilityID": "CVE-2023-27534" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27536", + "resource": "libcurl", + "severity": "LOW", + "target": "", + "title": "curl: GSS delegation too eager connection re-use", + "vulnerabilityID": "CVE-2023-27536" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27537", + "resource": "libcurl", + "severity": "LOW", + "target": "", + "title": "curl: HSTS double-free", + "vulnerabilityID": "CVE-2023-27537" + }, + { + "fixedVersion": "7.88.1-r1", + "installedVersion": "7.87.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-27538", + "resource": "libcurl", + "severity": "LOW", + "target": "", + "title": "curl: SSH connection too eager reuse still", + "vulnerabilityID": "CVE-2023-27538" + }, + { + "fixedVersion": "3.0.8-r1", + "installedVersion": "3.0.8-r0", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0464", + "resource": "libssl3", + "severity": "MEDIUM", + "target": "", + "title": "A security vulnerability has been identified in all supported versions ...", + "vulnerabilityID": "CVE-2023-0464" + }, + { + "fixedVersion": "4.4.0-r2", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2022-3970", + "resource": "tiff", + "score": 8.8, + "severity": "HIGH", + "target": "", + "title": "libtiff: integer overflow in function TIFFReadRGBATileExt of the file", + "vulnerabilityID": "CVE-2022-3970" + }, + { + "fixedVersion": "4.4.0-r2", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2022-3570", + "resource": "tiff", + "score": 5.5, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: heap Buffer 
overflows in tiffcrop.c", + "vulnerabilityID": "CVE-2022-3570" + }, + { + "fixedVersion": "4.4.0-r2", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2022-3597", + "resource": "tiff", + "score": 6.5, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds write in _TIFFmemcpy in libtiff/tif_unix", + "vulnerabilityID": "CVE-2022-3597" + }, + { + "fixedVersion": "4.4.0-r2", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2022-3598", + "resource": "tiff", + "score": 6.5, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds write in extractContigSamplesShifted24bits in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2022-3598" + }, + { + "fixedVersion": "4.4.0-r2", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2022-3599", + "resource": "tiff", + "score": 6.5, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds read in writeSingleSection in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2022-3599" + }, + { + "fixedVersion": "4.4.0-r2", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2022-3626", + "resource": "tiff", + "score": 6.5, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds write in _TIFFmemset in libtiff/tif_unix.c", + "vulnerabilityID": "CVE-2022-3626" + }, + { + "fixedVersion": "4.4.0-r2", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2022-3627", + "resource": "tiff", + "score": 6.5, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds write in _TIFFmemcpy in libtiff/tif_unix.c", + "vulnerabilityID": "CVE-2022-3627" + }, + { + "fixedVersion": "4.4.0-r2", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2022-4645", + "resource": "tiff", + "score": 5.6, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds read in tiffcp in tools/tiffcp.c", + "vulnerabilityID": "CVE-2022-4645" + }, + { + "fixedVersion": "4.4.0-r2", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2022-48281", + "resource": "tiff", + "score": 5.5, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: heap-based buffer overflow in processCropSelections() in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2022-48281" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0795", + "resource": "tiff", + "score": 6.1, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds read in extractContigSamplesShifted16bits() in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0795" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0796", + "resource": "tiff", + "score": 6.1, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds read in extractContigSamplesShifted24bits() in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0796" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0797", + "resource": "tiff", + "score": 6.1, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds read in _TIFFmemcpy() in libtiff/tif_unix.c when 
called by functions in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0797" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0798", + "resource": "tiff", + "score": 6.1, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds read in extractContigSamplesShifted8bits() in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0798" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0799", + "resource": "tiff", + "score": 5.5, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: use-after-free in extractContigSamplesShifted32bits() in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0799" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0800", + "resource": "tiff", + "score": 6.1, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds write in extractContigSamplesShifted16bits() in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0800" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0801", + "resource": "tiff", + "score": 6.1, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds write in _TIFFmemcpy() in libtiff/tif_unix.c when called by functions in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0801" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0802", + "resource": "tiff", + "score": 6.1, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds write in extractContigSamplesShifted32bits() in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0802" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0803", + "resource": "tiff", + "score": 6.1, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds write in extractContigSamplesShifted16bits() in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0803" + }, + { + "fixedVersion": "4.4.0-r3", + "installedVersion": "4.4.0-r1", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2023-0804", + "resource": "tiff", + "score": 6.1, + "severity": "MEDIUM", + "target": "", + "title": "libtiff: out-of-bounds write in extractContigSamplesShifted24bits() in tools/tiffcrop.c", + "vulnerabilityID": "CVE-2023-0804" + }, + { + "fixedVersion": "", + "installedVersion": "v1.44.212", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2020-8911", + "resource": "github.com/aws/aws-sdk-go", + "score": 5.6, + "severity": "MEDIUM", + "target": "", + "title": "aws/aws-sdk-go: CBC padding oracle issue in AWS S3 Crypto SDK for golang", + "vulnerabilityID": "CVE-2020-8911" + }, + { + "fixedVersion": "", + "installedVersion": "v1.44.212", + "links": [], + "primaryLink": "https://avd.aquasec.com/nvd/cve-2020-8912", + "resource": "github.com/aws/aws-sdk-go", + "score": 2.5, + "severity": "LOW", + "target": "", + "title": "aws-sdk-go: In-band key negotiation issue in AWS S3 Crypto SDK for golang", + "vulnerabilityID": "CVE-2020-8912" + } + ] + } + } + ] + } + \ No newline at end of file diff --git a/unittests/test_rest_framework.py b/unittests/test_rest_framework.py index 
8b2c75e762..8a47c4288b 100644 --- a/unittests/test_rest_framework.py +++ b/unittests/test_rest_framework.py @@ -2989,6 +2989,11 @@ def __init__(self, *args, **kwargs): self.deleted_objects = 5 BaseClass.RESTEndpointTest.__init__(self, *args, **kwargs) + def test_link_engagement_questionnaire(self): + end_url = self.url + "4/link_engagement/2/" + result = self.client.post(end_url) + self.assertEqual(result.status_code, status.HTTP_200_OK, f"Failed to link engagement survey to engagement: {result.content} on {end_url}") + class AnsweredSurveyTest(BaseClass.BaseClassTest): fixtures = ["questionnaire_testdata.json"] diff --git a/unittests/tools/test_aws_prowler_v3plus_parser.py b/unittests/tools/test_aws_prowler_v3plus_parser.py index 6eb22c296c..5ef20b764a 100644 --- a/unittests/tools/test_aws_prowler_v3plus_parser.py +++ b/unittests/tools/test_aws_prowler_v3plus_parser.py @@ -1,3 +1,5 @@ +from datetime import date + from dojo.models import Test from dojo.tools.aws_prowler_v3plus.parser import AWSProwlerV3plusParser from unittests.dojo_test_case import DojoTestCase @@ -23,6 +25,7 @@ def test_aws_prowler_parser_with_critical_vuln_has_one_findings_json(self): self.assertIn("Check if ACM Certificates are about to expire in specific days or less", findings[0].description) self.assertEqual("arn:aws:acm:us-east-1:999999999999:certificate/ffffffff-0000-0000-0000-000000000000", findings[0].component_name) self.assertIn("https://docs.aws.amazon.com/config/latest/developerguide/acm-certificate-expiration-check.html", findings[0].references) + self.assertEqual(date.fromisoformat("2023-04-25"), findings[0].date) def test_aws_prowler_parser_with_many_vuln_has_many_findings_json(self): findings = self.setup( @@ -31,18 +34,44 @@ def test_aws_prowler_parser_with_many_vuln_has_many_findings_json(self): with self.subTest(i=0): self.assertEqual("prowler-aws-acm_certificates_expiration_check-999999999999-us-east-1-api.teste.teste.com", findings[0].unique_id_from_tool) self.assertIn("Check if ACM Certificates are about to expire in specific days or less", findings[0].description) + self.assertEqual(date.fromisoformat("2023-04-25"), findings[0].date) with self.subTest(i=1): self.assertEqual("prowler-aws-accessanalyzer_enabled-999999999999-us-east-1-999999999999", findings[1].unique_id_from_tool) self.assertIn("Check if IAM Access Analyzer is enabled", findings[1].description) + self.assertEqual(date.fromisoformat("2023-04-25"), findings[1].date) with self.subTest(i=3): self.assertEqual("prowler-aws-account_maintain_current_contact_details-999999999999-us-east-1-999999999999", findings[2].unique_id_from_tool) self.assertIn("Maintain current contact details.", findings[2].description) + self.assertEqual(date.fromisoformat("2023-04-25"), findings[2].date) def test_aws_prowler_parser_with_no_vuln_has_no_findings_ocsf_json(self): findings = self.setup( open("unittests/scans/aws_prowler_v3plus/no_vuln.ocsf.json", encoding="utf-8")) self.assertEqual(0, len(findings)) + def test_aws_prowler_parser_after_4_5_0_with_critical_vuln_has_one_findings_ocsf_json(self): + findings = self.setup( + open("unittests/scans/aws_prowler_v3plus/one_vuln_after_4_5_0.ocsf.json", encoding="utf-8")) + self.assertEqual(1, len(findings)) + self.assertEqual("prowler-aws-iam_role_administratoraccess_policy_permissive_trust_relationship-123456789012-us-east-1-myAdministratorExecutionRole", findings[0].unique_id_from_tool) + self.assertIn("Ensure IAM Roles with attached AdministratorAccess policy have a well defined trust relationship",
findings[0].description) + self.assertEqual("arn:aws:iam::123456789012:role/myAdministratorExecutionRole", findings[0].component_name) + self.assertIn("https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege", findings[0].references) + self.assertEqual(date.fromisoformat("2024-06-03"), findings[0].date) + + def test_aws_prowler_parser_after_4_5_0_with_many_vuln_has_many_findings_ocsf_json(self): + findings = self.setup( + open("unittests/scans/aws_prowler_v3plus/many_vuln_after_4_5_0.ocsf.json", encoding="utf-8")) + self.assertEqual(2, len(findings)) + with self.subTest(i=0): + self.assertEqual("prowler-aws-iam_role_administratoraccess_policy_permissive_trust_relationship-123456789012-us-east-1-myAdministratorExecutionRole", findings[0].unique_id_from_tool) + self.assertIn("Ensure IAM Roles with attached AdministratorAccess policy have a well defined trust relationship", findings[0].description) + self.assertEqual(date.fromisoformat("2024-06-03"), findings[0].date) + with self.subTest(i=1): + self.assertEqual("prowler-aws-iam_role_cross_account_readonlyaccess_policy-123456789012-us-east-1-AuditRole", findings[1].unique_id_from_tool) + self.assertIn("Ensure IAM Roles do not have ReadOnlyAccess access for external AWS accounts", findings[1].description) + self.assertEqual(date.fromisoformat("2024-06-03"), findings[1].date) + def test_aws_prowler_parser_with_critical_vuln_has_one_findings_ocsf_json(self): findings = self.setup( open("unittests/scans/aws_prowler_v3plus/one_vuln.ocsf.json", encoding="utf-8")) @@ -51,6 +80,7 @@ def test_aws_prowler_parser_with_critical_vuln_has_one_findings_ocsf_json(self): self.assertIn("Ensure IAM Roles with attached AdministratorAccess policy have a well defined trust relationship", findings[0].description) self.assertEqual("arn:aws:iam::123456789012:role/myAdministratorExecutionRole", findings[0].component_name) self.assertIn("https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege", findings[0].references) + self.assertEqual(date.fromisoformat("2024-06-03"), findings[0].date) def test_aws_prowler_parser_with_many_vuln_has_many_findings_ocsf_json(self): findings = self.setup( @@ -59,6 +89,8 @@ def test_aws_prowler_parser_with_many_vuln_has_many_findings_ocsf_json(self): with self.subTest(i=0): self.assertEqual("prowler-aws-iam_role_administratoraccess_policy_permissive_trust_relationship-123456789012-us-east-1-myAdministratorExecutionRole", findings[0].unique_id_from_tool) self.assertIn("Ensure IAM Roles with attached AdministratorAccess policy have a well defined trust relationship", findings[0].description) + self.assertEqual(date.fromisoformat("2024-06-03"), findings[0].date) with self.subTest(i=1): self.assertEqual("prowler-aws-iam_role_cross_account_readonlyaccess_policy-123456789012-us-east-1-AuditRole", findings[1].unique_id_from_tool) self.assertIn("Ensure IAM Roles do not have ReadOnlyAccess access for external AWS accounts", findings[1].description) + self.assertEqual(date.fromisoformat("2024-06-03"), findings[1].date) diff --git a/unittests/tools/test_kubescape_parser.py b/unittests/tools/test_kubescape_parser.py index caacd8fe85..346cda0401 100644 --- a/unittests/tools/test_kubescape_parser.py +++ b/unittests/tools/test_kubescape_parser.py @@ -8,7 +8,7 @@ def test_parse_file_has_many_findings(self): with open(get_unit_tests_path() + "/scans/kubescape/many_findings.json", encoding="utf-8") as testfile: parser = KubescapeParser() findings = parser.get_findings(testfile, Test()) - 
self.assertEqual(710, len(findings)) + self.assertEqual(349, len(findings)) def test_parse_file_has_many_results(self): with open(get_unit_tests_path() + "/scans/kubescape/results.json", encoding="utf-8") as testfile: diff --git a/unittests/tools/test_trivy_operator_parser.py b/unittests/tools/test_trivy_operator_parser.py index 5e4a71558d..0acf11cb70 100644 --- a/unittests/tools/test_trivy_operator_parser.py +++ b/unittests/tools/test_trivy_operator_parser.py @@ -25,7 +25,7 @@ def test_configauditreport_single_vulns(self): finding = findings[0] self.assertEqual("Low", finding.severity) self.assertEqual(1, len(finding.unsaved_vulnerability_ids)) - self.assertEqual("KSV014", finding.unsaved_vulnerability_ids[0]) + self.assertEqual("AVD-KSV-0014", finding.unsaved_vulnerability_ids[0]) self.assertEqual("KSV014 - Root file system is not read-only", finding.title) def test_configauditreport_many_vulns(self): @@ -36,12 +36,12 @@ def test_configauditreport_many_vulns(self): finding = findings[0] self.assertEqual("Low", finding.severity) self.assertEqual(1, len(finding.unsaved_vulnerability_ids)) - self.assertEqual("KSV014", finding.unsaved_vulnerability_ids[0]) + self.assertEqual("AVD-KSV-0014", finding.unsaved_vulnerability_ids[0]) self.assertEqual("KSV014 - Root file system is not read-only", finding.title) finding = findings[1] self.assertEqual("Low", finding.severity) self.assertEqual(1, len(finding.unsaved_vulnerability_ids)) - self.assertEqual("KSV016", finding.unsaved_vulnerability_ids[0]) + self.assertEqual("AVD-KSV-0016", finding.unsaved_vulnerability_ids[0]) self.assertEqual("KSV016 - Memory requests not specified", finding.title) def test_vulnerabilityreport_no_vuln(self): @@ -96,8 +96,6 @@ def test_exposedsecretreport_single_vulns(self): self.assertEqual(len(findings), 1) finding = findings[0] self.assertEqual("Critical", finding.severity) - self.assertEqual(1, len(finding.unsaved_vulnerability_ids)) - self.assertEqual("aws-secret-access-key", finding.unsaved_vulnerability_ids[0]) self.assertEqual("aws-secret-access-key", finding.references) self.assertEqual("root/aws_secret.txt", finding.file_path) self.assertEqual("Secret detected in root/aws_secret.txt - AWS Secret Access Key", finding.title) @@ -109,15 +107,11 @@ def test_exposedsecretreport_many(self): self.assertEqual(len(findings), 2) finding = findings[0] self.assertEqual("Critical", finding.severity) - self.assertEqual(1, len(finding.unsaved_vulnerability_ids)) - self.assertEqual("aws-secret-access-key", finding.unsaved_vulnerability_ids[0]) self.assertEqual("aws-secret-access-key", finding.references) self.assertEqual("root/aws_secret.txt", finding.file_path) self.assertEqual("Secret detected in root/aws_secret.txt - AWS Secret Access Key", finding.title) finding = findings[1] self.assertEqual("Critical", finding.severity) - self.assertEqual(1, len(finding.unsaved_vulnerability_ids)) - self.assertEqual("github-pat", finding.unsaved_vulnerability_ids[0]) self.assertEqual("github-pat", finding.references) self.assertEqual("root/github_secret.txt", finding.file_path) self.assertEqual("Secret detected in root/github_secret.txt - GitHub Personal Access Token", finding.title) @@ -163,3 +157,9 @@ def test_findings_in_list(self): parser = TrivyOperatorParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(len(findings), 18) + + def test_findings_all_reports_in_dict(self): + with open(sample_path("all_reports_in_dict.json"), encoding="utf-8") as test_file: + parser = TrivyOperatorParser() + findings = 
parser.get_findings(test_file, Test()) + self.assertEqual(len(findings), 43)
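
Note on the dict-shaped Trivy operator fixture exercised by test_findings_all_reports_in_dict above: all_reports_in_dict.json groups every report list under its CRD name (e.g. "vulnerabilityreports.aquasecurity.github.io"), whereas the other fixtures hold either a single report object or a plain list of reports. Below is a minimal sketch of how those three input shapes could be normalized before per-kind processing. The helper names (normalize_reports, count_entries) are hypothetical, for illustration only — this is not the actual TrivyOperatorParser implementation.

import json

def normalize_reports(data):
    # Hypothetical helper; the real parser internals may differ.
    # Handles the three shapes seen in unittests/scans/trivy_operator:
    # a dict mapping CRD names to report lists (all_reports_in_dict.json),
    # a plain list of reports, or a single report object (which carries a
    # top-level "kind" such as "VulnerabilityReport").
    if isinstance(data, dict) and "kind" not in data:
        reports = []
        for report_list in data.values():
            reports.extend(report_list)
        return reports
    if isinstance(data, list):
        return data
    return [data]

def count_entries(path):
    # Rough per-fixture sanity check: counts the entries that typically
    # become findings (config-audit checks, exposed secrets, vulnerabilities).
    with open(path, encoding="utf-8") as f:
        data = json.load(f)
    total = 0
    for report in normalize_reports(data):
        body = report.get("report", {})
        total += len(body.get("checks", []))
        total += len(body.get("secrets", []))
        total += len(body.get("vulnerabilities", []))
    return total

print(count_entries("unittests/scans/trivy_operator/all_reports_in_dict.json"))

For the fixture above this should print 43: 1 config-audit check, 1 exposed secret, and 41 vulnerabilities (consistent with the vulnerability report's summary of 2 critical + 3 high + 25 medium + 11 low), which matches the new assertEqual(len(findings), 43).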