diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py
index 6e100f43b57..c9f73c97ce3 100644
--- a/dojo/api_v2/serializers.py
+++ b/dojo/api_v2/serializers.py
@@ -1,3 +1,4 @@
+import collections
 import json
 import logging
 import re
@@ -280,10 +281,10 @@ def _pop_tags(self, validated_data):
         return (to_be_tagged, validated_data)


-class RequestResponseDict(list):
+class RequestResponseDict(collections.UserList):
     def __init__(self, *args, **kwargs):
         pretty_print = kwargs.pop("pretty_print", True)
-        list.__init__(self, *args, **kwargs)
+        collections.UserList.__init__(self, *args, **kwargs)
         self.pretty_print = pretty_print

     def __add__(self, rhs):
@@ -2697,6 +2698,11 @@ class ReportGenerateSerializer(serializers.Serializer):
     )


+class EngagementUpdateJiraEpicSerializer(serializers.Serializer):
+    epic_name = serializers.CharField(required=False, max_length=200)
+    epic_priority = serializers.CharField(required=False, allow_null=True)
+
+
 class TagSerializer(serializers.Serializer):
     tags = TagListSerializerField(required=True)

diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py
index 52978f3b241..e6dd30dab4d 100644
--- a/dojo/api_v2/views.py
+++ b/dojo/api_v2/views.py
@@ -639,6 +639,36 @@ def download_file(self, request, file_id, pk=None):
         # send file
         return generate_file_response(file_object)

+    @extend_schema(
+        request=serializers.EngagementUpdateJiraEpicSerializer,
+        responses={status.HTTP_200_OK: serializers.EngagementUpdateJiraEpicSerializer},
+    )
+    @action(
+        detail=True, methods=["post"], permission_classes=[IsAuthenticated],
+    )
+    def update_jira_epic(self, request, pk=None):
+        engagement = self.get_object()
+        try:
+
+            if engagement.has_jira_issue:
+                jira_helper.update_epic(engagement, **request.data)
+                response = Response(
+                    {"info": "Jira Epic update query sent"},
+                    status=status.HTTP_200_OK,
+                )
+            else:
+                jira_helper.add_epic(engagement, **request.data)
+                response = Response(
+                    {"info": "Jira Epic create query sent"},
+                    status=status.HTTP_200_OK,
+                )
+            return response
+        except ValidationError:
+            return Response(
+                {"error": "Bad Request!"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+

 # @extend_schema_view(**schema_with_prefetch())
 # Nested models with prefetch make the response schema too long for Swagger UI
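A minimal sketch of how a client might call the new action, assuming a running DefectDojo instance, API-v2 token auth, an existing engagement id, and the default DRF action routing (host, token, and id below are placeholders):

import requests

DOJO_URL = "https://defectdojo.example.com"         # placeholder instance
HEADERS = {"Authorization": "Token <api-v2-key>"}   # placeholder token
ENGAGEMENT_ID = 42                                  # placeholder engagement id

resp = requests.post(
    f"{DOJO_URL}/api/v2/engagements/{ENGAGEMENT_ID}/update_jira_epic/",
    headers=HEADERS,
    json={"epic_name": "Q3 engagement", "epic_priority": "High"},
    timeout=30,
)
# Expected: 200 with {"info": "Jira Epic update query sent"} (or "create" when no epic is linked yet),
# 400 with {"error": "Bad Request!"} if a ValidationError is raised.
print(resp.status_code, resp.json())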
diff --git a/dojo/finding/views.py b/dojo/finding/views.py
index 0c0d78d6cc5..647bcccb43b 100644
--- a/dojo/finding/views.py
+++ b/dojo/finding/views.py
@@ -135,6 +135,9 @@ def prefetch_for_findings(findings, prefetch_type="all", exclude_untouched=True)
     if isinstance(
         findings, QuerySet,
     ):  # old code can arrive here with prods being a list because the query was already executed
+        prefetched_findings = prefetched_findings.prefetch_related(
+            "reviewers",
+        )
         prefetched_findings = prefetched_findings.prefetch_related("reporter")
         prefetched_findings = prefetched_findings.prefetch_related(
             "jira_issue__jira_project__jira_instance",
diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py
index df21ad766ff..f13dc031347 100644
--- a/dojo/jira_link/helper.py
+++ b/dojo/jira_link/helper.py
@@ -1251,7 +1251,9 @@ def close_epic(eng, push_to_jira, **kwargs):
             r = requests.post(
                 url=req_url,
                 auth=HTTPBasicAuth(jira_instance.username, jira_instance.password),
-                json=json_data)
+                json=json_data,
+                timeout=settings.REQUESTS_TIMEOUT,
+            )
             if r.status_code != 204:
                 logger.warning(f"JIRA close epic failed with error: {r.text}")
                 return False
@@ -1289,7 +1291,14 @@ def update_epic(engagement, **kwargs):
             if not epic_name:
                 epic_name = engagement.name

-            issue.update(summary=epic_name, description=epic_name)
+            epic_priority = kwargs.get("epic_priority", None)
+
+            jira_issue_update_kwargs = {
+                "summary": epic_name,
+                "description": epic_name,
+                "priority": {"name": epic_priority},
+            }
+            issue.update(**jira_issue_update_kwargs)
             return True
         except JIRAError as e:
             logger.exception(e)
diff --git a/dojo/management/commands/import_github_languages.py b/dojo/management/commands/import_github_languages.py
index 09e1be8bd20..9e1c45ffb32 100644
--- a/dojo/management/commands/import_github_languages.py
+++ b/dojo/management/commands/import_github_languages.py
@@ -2,6 +2,7 @@
 import logging

 import requests
+from django.conf import settings
 from django.core.management.base import BaseCommand

 from dojo.models import Language_Type
@@ -22,7 +23,12 @@ def handle(self, *args, **options):
         logger.info("Started importing languages from GitHub ...")

         try:
-            deserialized = json.loads(requests.get("https://raw.githubusercontent.com/ozh/github-colors/master/colors.json").text)
+            deserialized = json.loads(
+                requests.get(
+                    "https://raw.githubusercontent.com/ozh/github-colors/master/colors.json",
+                    timeout=settings.REQUESTS_TIMEOUT,
+                ).text,
+            )
         except:
             msg = "Invalid format"
             raise Exception(msg)
diff --git a/dojo/notifications/helper.py b/dojo/notifications/helper.py
index 46d0339dd33..b831ef4ed72 100644
--- a/dojo/notifications/helper.py
+++ b/dojo/notifications/helper.py
@@ -227,7 +227,9 @@ def _post_slack_message(channel):
                 "channel": channel,
                 "username": get_system_setting("slack_username"),
                 "text": create_notification_message(event, user, "slack", *args, **kwargs),
-            })
+            },
+            timeout=settings.REQUESTS_TIMEOUT,
+        )

         if "error" in res.text:
             logger.error("Slack is complaining. See raw text below.")
@@ -284,7 +286,9 @@ def send_msteams_notification(event, user=None, *args, **kwargs):
                 res = requests.request(
                     method="POST",
                     url=get_system_setting("msteams_url"),
-                    data=create_notification_message(event, None, "msteams", *args, **kwargs))
+                    data=create_notification_message(event, None, "msteams", *args, **kwargs),
+                    timeout=settings.REQUESTS_TIMEOUT,
+                )
                 if res.status_code != 200:
                     logger.error("Error when sending message to Microsoft Teams")
                     logger.error(res.status_code)
@@ -518,7 +522,9 @@ def get_slack_user_id(user_email):

     res = requests.request(
         method="POST",
         url="https://slack.com/api/users.lookupByEmail",
-        data={"token": get_system_setting("slack_token"), "email": user_email})
+        data={"token": get_system_setting("slack_token"), "email": user_email},
+        timeout=settings.REQUESTS_TIMEOUT,
+    )

     user = json.loads(res.text)
diff --git a/dojo/pipeline.py b/dojo/pipeline.py
index ee2dc0ae186..2aa17ebd88b 100644
--- a/dojo/pipeline.py
+++ b/dojo/pipeline.py
@@ -81,7 +81,11 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs):
             request_headers = {"Authorization": "Bearer " + token}
             if is_group_id(group_from_response):
                 logger.debug("detected " + group_from_response + " as groupID and will fetch the displayName from microsoft graph")
-                group_name_request = requests.get((str(soc.extra_data["resource"]) + "/v1.0/groups/" + str(group_from_response) + "?$select=displayName"), headers=request_headers)
+                group_name_request = requests.get(
+                    (str(soc.extra_data["resource"]) + "/v1.0/groups/" + str(group_from_response) + "?$select=displayName"),
+                    headers=request_headers,
+                    timeout=settings.REQUESTS_TIMEOUT,
+                )
                 group_name_request.raise_for_status()
                 group_name_request_json = group_name_request.json()
                 group_name = group_name_request_json["displayName"]
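For context, the API action forwards the request payload to the JIRA helper as keyword arguments, so the helper-level call is roughly the following. This is a sketch only; it assumes an Engagement already linked to a JIRA epic and a priority name that exists in the target JIRA project:

from dojo.jira_link import helper as jira_helper

# epic_name falls back to engagement.name when omitted; epic_priority is read via kwargs.get().
jira_helper.update_epic(engagement, epic_name="Q3 engagement", epic_priority="High")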
diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py
index 005dae5adb5..ec1e520c89f 100644
--- a/dojo/settings/settings.dist.py
+++ b/dojo/settings/settings.dist.py
@@ -307,6 +307,9 @@
     DD_NOTIFICATIONS_SYSTEM_LEVEL_TRUMP=(list, ["user_mentioned", "review_requested"]),
     # When enabled, force the password field to be required for creating/updating users
     DD_REQUIRE_PASSWORD_ON_USER=(bool, True),
+    # For HTTP requests, how long a connection may stay open before it times out
+    # This setting applies only to requests made directly with the "requests" lib in Dojo code (if an included lib uses "requests" itself, it is not affected)
+    DD_REQUESTS_TIMEOUT=(int, 30),
 )


@@ -1772,6 +1775,11 @@ def saml2_attrib_map_format(dict):
 # ------------------------------------------------------------------------------
 NOTIFICATIONS_SYSTEM_LEVEL_TRUMP = env("DD_NOTIFICATIONS_SYSTEM_LEVEL_TRUMP")

+# ------------------------------------------------------------------------------
+# Timeouts
+# ------------------------------------------------------------------------------
+REQUESTS_TIMEOUT = env("DD_REQUESTS_TIMEOUT")
+
 # ------------------------------------------------------------------------------
 # Ignored Warnings
 # ------------------------------------------------------------------------------
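The new value flows from the DD_REQUESTS_TIMEOUT environment variable into settings.REQUESTS_TIMEOUT and from there into every direct `requests` call touched by this diff. A minimal sketch of the pattern (the URL is illustrative):

import requests
from django.conf import settings
from requests.exceptions import Timeout

# The default is 30 seconds; override by exporting DD_REQUESTS_TIMEOUT (e.g. DD_REQUESTS_TIMEOUT=60) before starting Dojo.
try:
    response = requests.get("https://example.com/health", timeout=settings.REQUESTS_TIMEOUT)
except Timeout:
    # The call now fails fast instead of blocking forever on an unresponsive server.
    response = None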
diff --git a/dojo/templates/dojo/findings_list_snippet.html b/dojo/templates/dojo/findings_list_snippet.html
index eb0c2d7135f..51e85ab8d1b 100644
--- a/dojo/templates/dojo/findings_list_snippet.html
+++ b/dojo/templates/dojo/findings_list_snippet.html
@@ -384,6 +384,11 @@
                         {% trans "Planned Remediation" %}
+                    {% if filter_name != 'Closed' %}
+                        <th>
+                            {% trans "Reviewers" %}
+                        </th>
+                    {% endif %}
                 {% endblock header %}
@@ -699,6 +704,16 @@
                     {% if finding.planned_remediation_date %}{{ finding.planned_remediation_date }}{% endif %}
+                    {% if filter_name != 'Closed' %}
+                        <td>
+                            {% if finding.reviewers %}
+                                {% for reviewer in finding.reviewers.all %}
+                                    {{reviewer.get_full_name}}
+                                    {% if not forloop.last %}<br/>{% endif %}
+                                {% endfor %}
+                            {% endif %}
+                        </td>
+                    {% endif %}
                 {% endblock body %}
             {% endfor %}
@@ -779,6 +794,9 @@
                 {% endif %}
                 { "data": "service" },
                 { "data": "planned_remediation_date" },
+                {% if filter_name != 'Closed' %}
+                    { "data": "reviewers" },
+                {% endif %}
             ];
     {% endblock datatables_columns %}
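The Reviewers column rendered above calls finding.reviewers.all per row; the prefetch_related("reviewers") added in dojo/finding/views.py keeps that from becoming one query per finding. Roughly the ORM pattern involved (the filter is illustrative):

from dojo.models import Finding

# Reviewers for all findings are fetched in one extra query instead of one query per row.
findings = Finding.objects.filter(active=True).prefetch_related("reviewers")
for finding in findings:
    reviewer_names = [reviewer.get_full_name() for reviewer in finding.reviewers.all()]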
diff --git a/dojo/tools/api_bugcrowd/api_client.py b/dojo/tools/api_bugcrowd/api_client.py
index f180d64325f..7e9ac2b91cd 100644
--- a/dojo/tools/api_bugcrowd/api_client.py
+++ b/dojo/tools/api_bugcrowd/api_client.py
@@ -1,6 +1,7 @@
 from urllib.parse import urlencode

 import requests
+from django.conf import settings


 class BugcrowdAPI:
@@ -52,7 +53,10 @@ def get_findings(self, program, target):

         next = f"{self.bugcrowd_api_url}/submissions?{params_encoded}"
         while next != "":
-            response = self.session.get(url=next)
+            response = self.session.get(
+                url=next,
+                timeout=settings.REQUESTS_TIMEOUT,
+            )
             response.raise_for_status()
             if response.ok:
                 data = response.json()
@@ -75,12 +79,14 @@ def test_connection(self):
         # Request programs
         response_programs = self.session.get(
             url=f"{self.bugcrowd_api_url}/programs",
+            timeout=settings.REQUESTS_TIMEOUT,
         )
         response_programs.raise_for_status()

         # Request submissions to validate the org token
         response_subs = self.session.get(
             url=f"{self.bugcrowd_api_url}/submissions",
+            timeout=settings.REQUESTS_TIMEOUT,
         )
         response_subs.raise_for_status()
         if response_programs.ok and response_subs.ok:
@@ -95,6 +101,7 @@
             # Request targets to validate the org token
             response_targets = self.session.get(
                 url=f"{self.bugcrowd_api_url}/targets",
+                timeout=settings.REQUESTS_TIMEOUT,
             )
             response_targets.raise_for_status()
             if response_targets.ok:
diff --git a/dojo/tools/api_cobalt/api_client.py b/dojo/tools/api_cobalt/api_client.py
index 21318143e27..acd01635e90 100644
--- a/dojo/tools/api_cobalt/api_client.py
+++ b/dojo/tools/api_cobalt/api_client.py
@@ -1,4 +1,5 @@
 import requests
+from django.conf import settings


 class CobaltAPI:
@@ -36,6 +37,7 @@ def get_assets(self):
         response = self.session.get(
             url=f"{self.cobalt_api_url}/assets?limit=1000",
             headers=self.get_headers(),
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if response.ok:
@@ -56,6 +58,7 @@ def get_findings(self, asset_id):
         response = self.session.get(
             url=f"{self.cobalt_api_url}/findings?limit=1000&asset={asset_id}",
             headers=self.get_headers(),
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if response.ok:
@@ -72,12 +75,14 @@ def test_connection(self):
         response_orgs = self.session.get(
             url=f"{self.cobalt_api_url}/orgs",
             headers=self.get_headers(),
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         # Request assets to validate the org token
         response_assets = self.session.get(
             url=f"{self.cobalt_api_url}/assets",
             headers=self.get_headers(),
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if response_orgs.ok and response_assets.ok:
diff --git a/dojo/tools/api_edgescan/api_client.py b/dojo/tools/api_edgescan/api_client.py
index df0de92a471..580d753226c 100644
--- a/dojo/tools/api_edgescan/api_client.py
+++ b/dojo/tools/api_edgescan/api_client.py
@@ -2,6 +2,7 @@
 from json.decoder import JSONDecodeError

 import requests
+from django.conf import settings


 class EdgescanAPI:
@@ -42,6 +43,7 @@ def get_findings(self, asset_ids):
             url=url,
             headers=self.get_headers(),
             proxies=self.get_proxies(),
+            timeout=settings.REQUESTS_TIMEOUT,
         )
         response.raise_for_status()
         return response.json()
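With the timeout passed to every session call, a scanner API that stops responding now raises instead of hanging an import. A hedged sketch of what callers of these clients can expect (the endpoint is illustrative):

import requests
from django.conf import settings

session = requests.Session()
try:
    response = session.get(
        "https://api.scanner.example.com/findings",  # illustrative endpoint
        timeout=settings.REQUESTS_TIMEOUT,
    )
    response.raise_for_status()
except requests.exceptions.Timeout:
    # Previously this call could block indefinitely; now it fails after REQUESTS_TIMEOUT seconds.
    raise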
diff --git a/dojo/tools/api_sonarqube/api_client.py b/dojo/tools/api_sonarqube/api_client.py
index 9dd512efe99..9c04ceea648 100644
--- a/dojo/tools/api_sonarqube/api_client.py
+++ b/dojo/tools/api_sonarqube/api_client.py
@@ -1,4 +1,5 @@
 import requests
+from django.conf import settings
 from requests.exceptions import JSONDecodeError as RequestsJSONDecodeError

 from dojo.utils import prepare_for_view
@@ -75,6 +76,7 @@ def find_project(self, project_name, organization=None, branch=None):
             url=f"{self.sonar_api_url}/components/search",
             params=parameters,
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if not response.ok:
@@ -120,6 +122,7 @@ def get_project(self, project_key, organization=None, branch=None):
             url=f"{self.sonar_api_url}/components/show",
             params=parameters,
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if not response.ok:
@@ -173,6 +176,7 @@ def find_issues(
             url=f"{self.sonar_api_url}/issues/search",
             params=request_filter,
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if not response.ok:
@@ -215,6 +219,7 @@ def find_hotspots(self, project_key, organization=None, branch=None):
             url=f"{self.sonar_api_url}/hotspots/search",
             params=request_filter,
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if not response.ok:
@@ -250,6 +255,7 @@ def get_issue(self, issue_key):
             url=f"{self.sonar_api_url}/issues/search",
             params=request_filter,
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if not response.ok:
@@ -290,6 +296,7 @@ def get_rule(self, rule_id, organization=None):
             url=f"{self.sonar_api_url}/rules/show",
             params=request_filter,
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )
         if not response.ok:
             msg = (
@@ -314,6 +321,7 @@ def get_hotspot_rule(self, rule_id):
             url=f"{self.sonar_api_url}/hotspots/show",
             params={"hotspot": rule_id},
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )
         if not response.ok:
             msg = (
@@ -357,6 +365,7 @@ def transition_issue(self, issue_key, transition):
             url=f"{self.sonar_api_url}/issues/do_transition",
             data={"issue": issue_key, "transition": transition},
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if not response.ok:
@@ -378,6 +387,7 @@ def add_comment(self, issue_key, text):
             url=f"{self.sonar_api_url}/issues/add_comment",
             data={"issue": issue_key, "text": text},
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )
         if not response.ok:
             msg = (
@@ -397,6 +407,7 @@ def test_connection(self):
             url=f"{self.sonar_api_url}/components/search",
             params=parameters,
             headers=self.default_headers,
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if not response.ok:
diff --git a/dojo/tools/risk_recon/api.py b/dojo/tools/risk_recon/api.py
index 898db341ec7..ec505e15bdb 100644
--- a/dojo/tools/risk_recon/api.py
+++ b/dojo/tools/risk_recon/api.py
@@ -1,4 +1,5 @@
 import requests
+from django.conf import settings


 class RiskReconAPI:
@@ -33,6 +34,7 @@ def map_toes(self):
         response = self.session.get(
             url=f"{self.url}/toes",
             headers={"accept": "application/json", "Authorization": self.key},
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if response.ok:
@@ -75,6 +77,7 @@ def get_findings(self):
                 "accept": "application/json",
                 "Authorization": self.key,
             },
+            timeout=settings.REQUESTS_TIMEOUT,
         )

         if response.ok:
diff --git a/requirements-lint.txt b/requirements-lint.txt
index 17efde5683a..904511689c4 100644
--- a/requirements-lint.txt
+++ b/requirements-lint.txt
@@ -1 +1 @@
-ruff==0.7.2
\ No newline at end of file
+ruff==0.7.3
\ No newline at end of file
diff --git a/ruff.toml b/ruff.toml
index 085fc8f3f9d..9203826cda8 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -41,7 +41,7 @@ select = [
     "UP",
     "YTT",
     "ASYNC",
-    "S2", "S5", "S7", "S101", "S104", "S105", "S108", "S112", "S311",
+    "S2", "S5", "S7", "S101", "S104", "S105", "S106", "S108", "S311", "S112", "S113",
     "FBT001", "FBT003",
     "A003", "A004", "A006",
     "COM",
diff --git a/unittests/test_user_validators.py b/unittests/test_user_validators.py
index 2fd8afbbd77..8044202c07c 100644
--- a/unittests/test_user_validators.py
+++ b/unittests/test_user_validators.py
@@ -142,7 +142,7 @@ def test_validator_non_common_password_required(self):

     def test_form_invalid_current_pass(self):
         self.set_policy()
-        form = self.form_test("x", current_password="not current password")
+        form = self.form_test("x", current_password="not current password")  # noqa: S106
         self.assertFalse(form.is_valid())
         self.assertEqual(
             form.errors["__all__"][0],
@@ -158,6 +158,6 @@ def test_form_same_pass_as_before(self):

     def test_form_diff_confirm_password(self):
         self.set_policy()
-        form = self.form_test(password="x", confirm_password="y")
+        form = self.form_test(password="x", confirm_password="y")  # noqa: S106
         self.assertFalse(form.is_valid())
         self.assertEqual(form.errors["__all__"][0], "Passwords do not match.")