diff --git a/dojo/api_v2/mixins.py b/dojo/api_v2/mixins.py
index 54d55a76d09..4e2791aec50 100644
--- a/dojo/api_v2/mixins.py
+++ b/dojo/api_v2/mixins.py
@@ -47,11 +47,11 @@ def flatten(elem):
         return self.get_paginated_response(serializer.data)


-class QuestionSubClassFieldsMixin(object):
+class QuestionSubClassFieldsMixin:
     def get_queryset(self):
         return Question.objects.select_subclasses()


-class AnswerSubClassFieldsMixin(object):
+class AnswerSubClassFieldsMixin:
     def get_queryset(self):
         return Answer.objects.select_subclasses()
diff --git a/dojo/api_v2/permissions.py b/dojo/api_v2/permissions.py
index 4c0e8c6e2e3..b9c2ec433bb 100644
--- a/dojo/api_v2/permissions.py
+++ b/dojo/api_v2/permissions.py
@@ -40,9 +40,7 @@ def check_post_permission(request, post_model, post_pk, post_permission):
     if request.method == "POST":
         if request.data.get(post_pk) is None:
             raise ParseError(
-                "Unable to check for permissions: Attribute '{}' is required".format(
-                    post_pk
-                )
+                f"Unable to check for permissions: Attribute '{post_pk}' is required"
             )
         object = get_object_or_404(post_model, pk=request.data.get(post_pk))
         return user_has_permission(request.user, object, post_permission)
@@ -965,8 +963,7 @@ def raise_no_auto_create_import_validation_error(
     if product_name and not product:
         if product_type_name:
             raise serializers.ValidationError(
-                "Product '%s' doesn't exist in Product_Type '%s'"
-                % (product_name, product_type_name)
+                f"Product '{product_name}' doesn't exist in Product_Type '{product_type_name}'"
             )
         else:
             raise serializers.ValidationError(
@@ -975,21 +972,18 @@ def raise_no_auto_create_import_validation_error(
     if engagement_name and not engagement:
         raise serializers.ValidationError(
-            "Engagement '%s' doesn't exist in Product '%s'"
-            % (engagement_name, product_name)
+            f"Engagement '{engagement_name}' doesn't exist in Product '{product_name}'"
         )

     # these are only set for reimport
     if test_title:
         raise serializers.ValidationError(
-            "Test '%s' with scan_type '%s' doesn't exist in Engagement '%s'"
-            % (test_title, scan_type, engagement_name)
+            f"Test '{test_title}' with scan_type '{scan_type}' doesn't exist in Engagement '{engagement_name}'"
         )

     if scan_type:
         raise serializers.ValidationError(
-            "Test with scan_type '%s' doesn't exist in Engagement '%s'"
-            % (scan_type, engagement_name)
+            f"Test with scan_type '{scan_type}' doesn't exist in Engagement '{engagement_name}'"
         )

     raise ValidationError(error_message)
diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py
index 69a684d1cfd..a39397b8de7 100644
--- a/dojo/api_v2/serializers.py
+++ b/dojo/api_v2/serializers.py
@@ -245,7 +245,7 @@ def __init__(self, **kwargs):
             kwargs["style"] = {"base_template": "textarea.html"}
         kwargs["style"].update(style)

-        super(TagListSerializerField, self).__init__(**kwargs)
+        super().__init__(**kwargs)

         self.pretty_print = pretty_print
@@ -300,14 +300,14 @@ class TaggitSerializer(serializers.Serializer):
     def create(self, validated_data):
         to_be_tagged, validated_data = self._pop_tags(validated_data)

-        tag_object = super(TaggitSerializer, self).create(validated_data)
+        tag_object = super().create(validated_data)

         return self._save_tags(tag_object, to_be_tagged)

     def update(self, instance, validated_data):
         to_be_tagged, validated_data = self._pop_tags(validated_data)

-        tag_object = super(TaggitSerializer, self).update(
+        tag_object = super().update(
             instance, validated_data
         )
@@ -389,7 +389,7 @@ def __init__(self, **kwargs):
         if isinstance(data, list):
             kwargs["many"] = True

-        super(RequestResponseSerializerField, self).__init__(**kwargs)
+        super().__init__(**kwargs)

         self.pretty_print = pretty_print
@@ -1464,10 +1464,7 @@ def to_representation(self, data):
                 new_files.append(
                     {
                         "id": file.id,
-                        "file": "{site_url}/{file_access_url}".format(
-                            site_url=settings.SITE_URL,
-                            file_access_url=file.get_accessible_url(test, test.id),
-                        ),
+                        "file": f"{settings.SITE_URL}/{file.get_accessible_url(test, test.id)}",
                         "title": file.title,
                     }
                 )
@@ -2306,13 +2303,11 @@ def validate(self, data):
         file = data.get("file")
         if not file and requires_file(scan_type):
             raise serializers.ValidationError(
-                "Uploading a Report File is required for {}".format(scan_type)
+                f"Uploading a Report File is required for {scan_type}"
             )
         if file and is_scan_file_too_large(file):
             raise serializers.ValidationError(
-                "Report file is too large. Maximum supported size is {} MB".format(
-                    settings.SCAN_FILE_MAX_SIZE
-                )
+                f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB"
             )
         tool_type = requires_tool_type(scan_type)
         if tool_type:
@@ -2665,13 +2660,11 @@ def validate(self, data):
         file = data.get("file")
         if not file and requires_file(scan_type):
             raise serializers.ValidationError(
-                "Uploading a Report File is required for {}".format(scan_type)
+                f"Uploading a Report File is required for {scan_type}"
             )
         if file and is_scan_file_too_large(file):
             raise serializers.ValidationError(
-                "Report file is too large. Maximum supported size is {} MB".format(
-                    settings.SCAN_FILE_MAX_SIZE
-                )
+                f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB"
             )
         tool_type = requires_tool_type(scan_type)
         if tool_type:
@@ -2712,9 +2705,7 @@ def validate(self, data):
         file = data.get("file")
         if file and is_scan_file_too_large(file):
             raise serializers.ValidationError(
-                "Report file is too large. Maximum supported size is {} MB".format(
-                    settings.SCAN_FILE_MAX_SIZE
-                )
+                f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB"
             )
         return data
@@ -2818,9 +2809,7 @@ def save(self):
     def validate(self, data):
         if is_scan_file_too_large(data["file"]):
             raise serializers.ValidationError(
-                "File is too large. Maximum supported size is {} MB".format(
-                    settings.SCAN_FILE_MAX_SIZE
-                )
+                f"File is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB"
             )
         return data
diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py
index 2149a86a356..ff73d2f01f7 100644
--- a/dojo/api_v2/views.py
+++ b/dojo/api_v2/views.py
@@ -1214,9 +1214,7 @@ def remove_tags(self, request, pk=None):
                 if tag not in all_tags:
                     return Response(
                         {
-                            "error": "'{}' is not a valid tag in list".format(
-                                tag
-                            )
+                            "error": f"'{tag}' is not a valid tag in list"
                         },
                         status=status.HTTP_400_BAD_REQUEST,
                     )
@@ -2877,7 +2875,7 @@ def report_generate(request, obj, options):
         include_executive_summary = False
         include_table_of_contents = False

-    report_info = "Generated By %s on %s" % (
+    report_info = "Generated By {} on {}".format(
         user.get_full_name(),
         (timezone.now().strftime("%m/%d/%Y %I:%M%p %Z")),
     )
diff --git a/dojo/authorization/authorization.py b/dojo/authorization/authorization.py
index 69f3884a4ce..1cbef9ecd6e 100644
--- a/dojo/authorization/authorization.py
+++ b/dojo/authorization/authorization.py
@@ -259,7 +259,7 @@ def user_has_global_permission_or_403(user, permission):
 def get_roles_for_permission(permission):
     if not Permissions.has_value(permission):
         raise PermissionDoesNotExistError(
-            "Permission {} does not exist".format(permission)
+            f"Permission {permission} does not exist"
         )
     roles_for_permissions = set()
     roles = get_roles_with_permissions()
@@ -274,7 +274,7 @@ def role_has_permission(role, permission):
     if role is None:
         return False
     if not Roles.has_value(role):
-        raise RoleDoesNotExistError("Role {} does not exist".format(role))
+        raise RoleDoesNotExistError(f"Role {role} does not exist")
     roles = get_roles_with_permissions()
     permissions = roles.get(role)
     if not permissions:
@@ -286,7 +286,7 @@ def role_has_global_permission(role, permission):
     if role is None:
         return False
     if not Roles.has_value(role):
-        raise RoleDoesNotExistError("Role {} does not exist".format(role))
+        raise RoleDoesNotExistError(f"Role {role} does not exist")
     roles = get_global_roles_with_permissions()
     permissions = roles.get(role)
     if permissions and permission in permissions:
diff --git a/dojo/celery.py b/dojo/celery.py
index f2d73f03868..1fbf6e73fcb 100644
--- a/dojo/celery.py
+++ b/dojo/celery.py
@@ -20,7 +20,7 @@

 @app.task(bind=True)
 def debug_task(self):
-    print(('Request: {0!r}'.format(self.request)))
+    print(f'Request: {self.request!r}')


 @setup_logging.connect
diff --git a/dojo/components/sql_group_concat.py b/dojo/components/sql_group_concat.py
index 5aa8f10d645..ba23b24e385 100644
--- a/dojo/components/sql_group_concat.py
+++ b/dojo/components/sql_group_concat.py
@@ -9,7 +9,7 @@ def __init__(
         self, expression, separator, distinct=False, ordering=None, **extra
     ):
         self.separator = separator
-        super(Sql_GroupConcat, self).__init__(
+        super().__init__(
             expression,
             distinct="DISTINCT " if distinct else "",
             ordering=" ORDER BY %s" % ordering if ordering is not None else "",
diff --git a/dojo/development_environment/views.py b/dojo/development_environment/views.py
index d6d4c167b17..8de454f1a8b 100644
--- a/dojo/development_environment/views.py
+++ b/dojo/development_environment/views.py
@@ -85,7 +85,7 @@ def edit_dev_env(request, deid):
         except RestrictedError as err:
             messages.add_message(request,
                                  messages.WARNING,
-                                 'Environment cannot be deleted: {}'.format(err),
+                                 f'Environment cannot be deleted: {err}',
                                  extra_tags='alert-warning')
             return HttpResponseRedirect(reverse('dev_env'))
diff --git a/dojo/endpoint/utils.py b/dojo/endpoint/utils.py
index f52a46311c0..4c404a32755 100644
--- a/dojo/endpoint/utils.py
+++ b/dojo/endpoint/utils.py
@@ -92,7 +92,7 @@ def clean_hosts_run(apps, change):
     def err_log(message, html_log, endpoint_html_log, endpoint):
         error_suffix = 'It is not possible to migrate it. Delete or edit this endpoint.'
         html_log.append({**endpoint_html_log, **{'message': message}})
-        logger.error('Endpoint (id={}) {}. {}'.format(endpoint.pk, message, error_suffix))
+        logger.error(f'Endpoint (id={endpoint.pk}) {message}. {error_suffix}')
         broken_endpoints.add(endpoint.pk)
     html_log = []
     broken_endpoints = set()
@@ -120,8 +120,8 @@ def err_log(message, html_log, endpoint_html_log, endpoint):

         if parts.protocol:
             if endpoint.protocol and (endpoint.protocol != parts.protocol):
-                message = 'has defined protocol ({}) and it is not the same as protocol in host ' \
-                          '({})'.format(endpoint.protocol, parts.protocol)
+                message = f'has defined protocol ({endpoint.protocol}) and it is not the same as protocol in host ' \
+                          f'({parts.protocol})'
                 err_log(message, html_log, endpoint_html_log, endpoint)
             else:
                 if change:
@@ -135,26 +135,26 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
                 if change:
                     endpoint.host = parts.host
             else:
-                message = '"{}" use invalid format of host'.format(endpoint.host)
+                message = f'"{endpoint.host}" use invalid format of host'
                 err_log(message, html_log, endpoint_html_log, endpoint)

         if parts.port:
             try:
                 if (endpoint.port is not None) and (int(endpoint.port) != parts.port):
-                    message = 'has defined port number ({}) and it is not the same as port number in ' \
-                              'host ({})'.format(endpoint.port, parts.port)
+                    message = f'has defined port number ({endpoint.port}) and it is not the same as port number in ' \
+                              f'host ({parts.port})'
                     err_log(message, html_log, endpoint_html_log, endpoint)
                 else:
                     if change:
                         endpoint.port = parts.port
             except ValueError:
-                message = 'uses non-numeric port: {}'.format(endpoint.port)
+                message = f'uses non-numeric port: {endpoint.port}'
                 err_log(message, html_log, endpoint_html_log, endpoint)

         if parts.path:
             if endpoint.path and (endpoint.path != parts.path):
-                message = 'has defined path ({}) and it is not the same as path in host ' \
-                          '({})'.format(endpoint.path, parts.path)
+                message = f'has defined path ({endpoint.path}) and it is not the same as path in host ' \
+                          f'({parts.path})'
                 err_log(message, html_log, endpoint_html_log, endpoint)
             else:
                 if change:
@@ -162,8 +162,8 @@ def err_log(message, html_log, endpoint_html_log, endpoint):

         if parts.query:
             if endpoint.query and (endpoint.query != parts.query):
-                message = 'has defined query ({}) and it is not the same as query in host ' \
-                          '({})'.format(endpoint.query, parts.query)
+                message = f'has defined query ({endpoint.query}) and it is not the same as query in host ' \
+                          f'({parts.query})'
                 err_log(message, html_log, endpoint_html_log, endpoint)
             else:
                 if change:
@@ -171,8 +171,8 @@ def err_log(message, html_log, endpoint_html_log, endpoint):

         if parts.fragment:
             if endpoint.fragment and (endpoint.fragment != parts.fragment):
-                message = 'has defined fragment ({}) and it is not the same as fragment in host ' \
-                          '({})'.format(endpoint.fragment, parts.fragment)
+                message = f'has defined fragment ({endpoint.fragment}) and it is not the same as fragment in host ' \
+                          f'({parts.fragment})'
                 err_log(message, html_log, endpoint_html_log, endpoint)
             else:
                 if change:
@@ -182,7 +182,7 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
                 endpoint.save()

         except ValidationError:
-            message = '"{}" uses invalid format of host'.format(endpoint.host)
+            message = f'"{endpoint.host}" uses invalid format of host'
             err_log(message, html_log, endpoint_html_log, endpoint)

     try:
@@ -197,8 +197,8 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
             err_log('Missing product', html_log, endpoint_html_log, endpoint)

     if broken_endpoints:
-        logger.error('It is not possible to migrate database because there is/are {} broken endpoint(s). '
-                     'Please check logs.'.format(len(broken_endpoints)))
+        logger.error(f'It is not possible to migrate database because there is/are {len(broken_endpoints)} broken endpoint(s). '
+                     'Please check logs.')
     else:
         logger.info('There is not broken endpoint.')
@@ -223,8 +223,8 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
                 to_be_deleted.update(ep_ids[1:])
                 if change:
                     message = "Merging Endpoints {} into '{}'".format(
-                        ["{} (id={})".format(str(x), x.pk) for x in ep[1:]],
-                        "{} (id={})".format(str(ep[0]), ep[0].pk))
+                        [f"{str(x)} (id={x.pk})" for x in ep[1:]],
+                        f"{str(ep[0])} (id={ep[0].pk})")
                     html_log.append(message)
                     logger.info(message)
                     Endpoint_Status_model.objects\
@@ -240,18 +240,18 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
                         .filter(finding=eps['finding'])\
                         .order_by('-last_modified')
                     message = "Endpoint Statuses {} will be replaced by '{}'".format(
-                        ["last_modified: {} (id={})".format(x.last_modified, x.pk) for x in esm[1:]],
-                        "last_modified: {} (id={})".format(esm[0].last_modified, esm[0].pk))
+                        [f"last_modified: {x.last_modified} (id={x.pk})" for x in esm[1:]],
+                        f"last_modified: {esm[0].last_modified} (id={esm[0].pk})")
                     html_log.append(message)
                     logger.info(message)
                     esm.exclude(id=esm[0].pk).delete()

         if to_be_deleted:
             if change:
-                message = "Removing endpoints: {}".format(list(to_be_deleted))
+                message = f"Removing endpoints: {list(to_be_deleted)}"
                 Endpoint_model.objects.filter(id__in=to_be_deleted).delete()
             else:
-                message = "Redundant endpoints: {}, migration is required.".format(list(to_be_deleted))
+                message = f"Redundant endpoints: {list(to_be_deleted)}, migration is required."
             html_log.append(message)
             logger.info(message)
@@ -283,7 +283,7 @@ def validate_endpoints_to_add(endpoints_to_add):
         except ValidationError as ves:
             for ve in ves:
                 errors.append(
-                    ValidationError("Invalid endpoint {}: {}".format(endpoint, ve))
+                    ValidationError(f"Invalid endpoint {endpoint}: {ve}")
                 )
     return endpoint_list, errors
diff --git a/dojo/endpoint/views.py b/dojo/endpoint/views.py
index d35a1390988..dbe0956d104 100644
--- a/dojo/endpoint/views.py
+++ b/dojo/endpoint/views.py
@@ -216,7 +216,7 @@ def delete_endpoint(request, eid):
             create_notification(event='other',
                                 title='Deletion of %s' % endpoint,
                                 product=product,
-                                description='The endpoint "%s" was deleted by %s' % (endpoint, request.user),
+                                description=f'The endpoint "{endpoint}" was deleted by {request.user}',
                                 url=reverse('endpoint'),
                                 icon="exclamation-triangle")
             return HttpResponseRedirect(reverse('view_product', args=(product.id,)))
@@ -372,12 +372,12 @@ def endpoint_bulk_update_all(request, pid=None):
                     calculate_grade(prod)

                 if skipped_endpoint_count > 0:
-                    add_error_message_to_response('Skipped deletion of {} endpoints because you are not authorized.'.format(skipped_endpoint_count))
+                    add_error_message_to_response(f'Skipped deletion of {skipped_endpoint_count} endpoints because you are not authorized.')

                 if deleted_endpoint_count > 0:
                     messages.add_message(request,
                                          messages.SUCCESS,
-                                         'Bulk delete of {} endpoints was successful.'.format(deleted_endpoint_count),
+                                         f'Bulk delete of {deleted_endpoint_count} endpoints was successful.',
                                          extra_tags='alert-success')
         else:
             if endpoints_to_update:
@@ -392,7 +392,7 @@ def endpoint_bulk_update_all(request, pid=None):
                     updated_endpoint_count = endpoints.count()

                     if skipped_endpoint_count > 0:
-                        add_error_message_to_response('Skipped mitigation of {} endpoints because you are not authorized.'.format(skipped_endpoint_count))
+                        add_error_message_to_response(f'Skipped mitigation of {skipped_endpoint_count} endpoints because you are not authorized.')

                     eps_count = Endpoint_Status.objects.filter(endpoint__in=endpoints).update(
                         mitigated=True,
@@ -404,8 +404,7 @@ def endpoint_bulk_update_all(request, pid=None):
                     if updated_endpoint_count > 0:
                         messages.add_message(request,
                                              messages.SUCCESS,
-                                             'Bulk mitigation of {} endpoints ({} endpoint statuses) was successful.'.format(
-                                                 updated_endpoint_count, eps_count),
+                                             f'Bulk mitigation of {updated_endpoint_count} endpoints ({eps_count} endpoint statuses) was successful.',
                                              extra_tags='alert-success')
         else:
             messages.add_message(request,
@@ -488,7 +487,7 @@ def import_endpoint_meta(request, pid):
                 messages.add_message(
                     request,
                     messages.ERROR,
-                    "Report file is too large. Maximum supported size is {} MB".format(settings.SCAN_FILE_MAX_SIZE),
+                    f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB",
                     extra_tags='alert-danger')

         create_endpoints = form.cleaned_data['create_endpoints']
diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py
index 230c18cc3a0..45bb5009c80 100644
--- a/dojo/engagement/views.py
+++ b/dojo/engagement/views.py
@@ -310,7 +310,7 @@ def delete_engagement(request, eid):
                 create_notification(event='other',
                                     title='Deletion of %s' % engagement.name,
                                     product=product,
-                                    description='The engagement "%s" was deleted by %s' % (engagement.name, request.user),
+                                    description=f'The engagement "{engagement.name}" was deleted by {request.user}',
                                     url=request.build_absolute_uri(reverse('view_engagements', args=(product.id, ))),
                                     recipients=[engagement.lead],
                                     icon="exclamation-triangle")
@@ -352,7 +352,7 @@ def copy_engagement(request, eid):
                                  extra_tags='alert-success')
            create_notification(event='other',
                                title='Copying of %s' % engagement.name,
-                               description='The engagement "%s" was copied by %s' % (engagement.name, request.user),
+                               description=f'The engagement "{engagement.name}" was copied by {request.user}',
                                product=product,
                                url=request.build_absolute_uri(reverse('view_engagement', args=(engagement_copy.id, ))),
                                recipients=[engagement.lead],
@@ -504,7 +504,7 @@ def post(self, request, eid, *args, **kwargs):
                 form = TypedNoteForm(available_note_types=available_note_types)
             else:
                 form = NoteForm()
-            title = "Engagement: %s on %s" % (eng.name, eng.product.name)
+            title = f"Engagement: {eng.name} on {eng.product.name}"
             messages.add_message(request,
                                  messages.SUCCESS,
                                  'Note added successfully.',
@@ -755,7 +755,7 @@ def post(self, request, eid=None, pid=None):
                 if scan and is_scan_file_too_large(scan):
                     messages.add_message(request,
                                          messages.ERROR,
-                                         "Report file is too large. Maximum supported size is {} MB".format(settings.SCAN_FILE_MAX_SIZE),
+                                         f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB",
                                          extra_tags='alert-danger')
                     return HttpResponseRedirect(reverse('import_scan_results', args=(engagement,)))
@@ -1288,11 +1288,10 @@ def engagement_ics(request, eid):
     uid = "dojo_eng_%d_%d" % (eng.id, eng.product.id)
     cal = get_cal_event(
         start_date, end_date,
-        "Engagement: %s (%s)" % (eng.name, eng.product.name),
-        "Set aside for engagement %s, on product %s. Additional detail can be found at %s"
-        % (eng.name, eng.product.name,
+        f"Engagement: {eng.name} ({eng.product.name})",
+        "Set aside for engagement {}, on product {}. Additional detail can be found at {}".format(eng.name, eng.product.name,
           request.build_absolute_uri(
-              (reverse("view_engagement", args=(eng.id, ))))), uid)
+              reverse("view_engagement", args=(eng.id, )))), uid)
     output = cal.serialize()
     response = HttpResponse(content=output)
     response['Content-Type'] = 'text/calendar'
diff --git a/dojo/filters.py b/dojo/filters.py
index 4f1f3c539ad..f5726fb4a80 100644
--- a/dojo/filters.py
+++ b/dojo/filters.py
@@ -131,7 +131,7 @@ def under_review(self, qs, name):
     def __init__(self, *args, **kwargs):
         kwargs['choices'] = [
             (key, value[0]) for key, value in six.iteritems(self.options)]
-        super(FindingStatusFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def filter(self, qs, value):
         earliest_finding = get_earliest_finding(qs)
@@ -179,7 +179,7 @@ def sla_violated(self, qs, name):
     def __init__(self, *args, **kwargs):
         kwargs['choices'] = [
             (key, value[0]) for key, value in six.iteritems(self.options)]
-        super(FindingSLAFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def filter(self, qs, value):
         try:
@@ -214,7 +214,7 @@ def sla_violated(self, qs, name):
     def __init__(self, *args, **kwargs):
         kwargs['choices'] = [
             (key, value[0]) for key, value in six.iteritems(self.options)]
-        super(ProductSLAFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def filter(self, qs, value):
         try:
@@ -246,7 +246,7 @@ def cwe_options(queryset):

 class DojoFilter(FilterSet):
     def __init__(self, *args, **kwargs):
-        super(DojoFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         for field in ['tags', 'test__tags', 'test__engagement__tags', 'test__engagement__product__tags',
                       'not_tags', 'not_test__tags', 'not_test__engagement__tags', 'not_test__engagement__product__tags']:
@@ -483,7 +483,7 @@ class DateRangeFilter(ChoiceFilter):
     def __init__(self, *args, **kwargs):
         kwargs['choices'] = [
             (key, value[0]) for key, value in six.iteritems(self.options)]
-        super(DateRangeFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def filter(self, qs, value):
         try:
@@ -545,7 +545,7 @@ class DateRangeOmniFilter(ChoiceFilter):
     def __init__(self, *args, **kwargs):
         kwargs['choices'] = [
             (key, value[0]) for key, value in six.iteritems(self.options)]
-        super(DateRangeOmniFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def filter(self, qs, value):
         try:
@@ -569,7 +569,7 @@ class ReportBooleanFilter(ChoiceFilter):
     def __init__(self, *args, **kwargs):
         kwargs['choices'] = [
             (key, value[0]) for key, value in six.iteritems(self.options)]
-        super(ReportBooleanFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def filter(self, qs, value):
         try:
@@ -604,7 +604,7 @@ def was_accepted(self, qs, name):
     def __init__(self, *args, **kwargs):
         kwargs['choices'] = [
             (key, value[0]) for key, value in six.iteritems(self.options)]
-        super(ReportRiskAcceptanceFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def filter(self, qs, value):
         try:
@@ -679,7 +679,7 @@ def past_year(self, qs, name):
     def __init__(self, *args, **kwargs):
         kwargs['choices'] = [
             (key, value[0]) for key, value in six.iteritems(self.options)]
-        super(MetricsDateRangeFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def filter(self, qs, value):
         if value == 8:
@@ -729,7 +729,7 @@ class ComponentFilter(ProductComponentFilter):
         label="Product")

     def __init__(self, *args, **kwargs):
-        super(ComponentFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.form.fields[
             'test__engagement__product__prod_type'].queryset = get_authorized_product_types(Permissions.Product_Type_View)
         self.form.fields[
@@ -792,7 +792,7 @@ class EngagementDirectFilter(DojoFilter):
     )

     def __init__(self, *args, **kwargs):
-        super(EngagementDirectFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.form.fields['product__prod_type'].queryset = get_authorized_product_types(Permissions.Product_Type_View)
         self.form.fields['lead'].queryset = get_authorized_users(Permissions.Product_Type_View) \
             .filter(engagement__lead__isnull=False).distinct()
@@ -852,7 +852,7 @@ class EngagementFilter(DojoFilter):
     )

     def __init__(self, *args, **kwargs):
-        super(EngagementFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.form.fields['prod_type'].queryset = get_authorized_product_types(Permissions.Product_Type_View)
         self.form.fields['engagement__lead'].queryset = get_authorized_users(Permissions.Product_Type_View) \
             .filter(engagement__lead__isnull=False).distinct()
@@ -910,7 +910,7 @@ class ProductEngagementFilter(DojoFilter):
     )

     def __init__(self, *args, **kwargs):
-        super(ProductEngagementFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.form.fields['lead'].queryset = get_authorized_users(Permissions.Product_Type_View) \
             .filter(engagement__lead__isnull=False).distinct()
@@ -1098,7 +1098,7 @@ def __init__(self, *args, **kwargs):
         if 'user' in kwargs:
             self.user = kwargs.pop('user')
-        super(ProductFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         self.form.fields['prod_type'].queryset = get_authorized_product_types(Permissions.Product_Type_View)
@@ -1715,7 +1715,7 @@ class Meta:
     not_tag = CharFilter(field_name='tags__name', lookup_expr='icontains', label='Not tag name contains', exclude=True)

     def __init__(self, *args, **kwargs):
-        super(TemplateFindingFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.form.fields['cwe'].choices = cwe_options(self.queryset)
@@ -1950,12 +1950,12 @@ def __init__(self, *args, **kwargs):
         self.user = None
         if 'user' in kwargs:
             self.user = kwargs.pop('user')
-        super(EndpointFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.form.fields['product'].queryset = get_authorized_products(Permissions.Product_View)

     @property
     def qs(self):
-        parent = super(EndpointFilter, self).qs
+        parent = super().qs
         return get_authorized_endpoints(Permissions.Endpoint_View, parent)

     class Meta:
@@ -2348,7 +2348,7 @@ class LogEntryFilter(DojoFilter):
     timestamp = DateRangeFilter()

     def __init__(self, *args, **kwargs):
-        super(LogEntryFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.form.fields['actor'].queryset = get_authorized_users(Permissions.Product_View)

     class Meta:
@@ -2467,7 +2467,7 @@ def choice_question(self, qs, name):
     def __init__(self, *args, **kwargs):
         kwargs['choices'] = [
             (key, value[0]) for key, value in six.iteritems(self.options)]
-        super(QuestionTypeFilter, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def filter(self, qs, value):
         try:
diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py
index 95421553f44..debb209b3f1 100644
--- a/dojo/finding/helper.py
+++ b/dojo/finding/helper.py
@@ -236,7 +236,7 @@ def get_group_by_group_name(finding, finding_group_by_option):
             group_name = finding.component_name
     elif finding_group_by_option == 'component_name+component_version':
         if finding.component_name or finding.component_version:
-            group_name = '%s:%s' % ((finding.component_name if finding.component_name else 'None'),
+            group_name = '{}:{}'.format((finding.component_name if finding.component_name else 'None'),
                                     (finding.component_version if finding.component_version else 'None'))
     elif finding_group_by_option == 'file_path':
         if finding.file_path:
diff --git a/dojo/finding/views.py b/dojo/finding/views.py
index 08ce201491d..0adae7ae642 100644
--- a/dojo/finding/views.py
+++ b/dojo/finding/views.py
@@ -1279,8 +1279,7 @@ def close_finding(request, fid):
                     event="other",
                     title="Closing of %s" % finding.title,
                     finding=finding,
-                    description='The finding "%s" was closed by %s'
-                    % (finding.title, request.user),
+                    description=f'The finding "{finding.title}" was closed by {request.user}',
                     url=reverse("view_finding", args=(finding.id,)),
                 )
                 return HttpResponseRedirect(
@@ -1443,8 +1442,7 @@ def reopen_finding(request, fid):
         event="other",
         title="Reopening of %s" % finding.title,
         finding=finding,
-        description='The finding "%s" was reopened by %s'
-        % (finding.title, request.user),
+        description=f'The finding "{finding.title}" was reopened by {request.user}',
         url=reverse("view_finding", args=(finding.id,)),
     )
     return HttpResponseRedirect(reverse("view_finding", args=(finding.id,)))
@@ -1501,8 +1499,7 @@ def copy_finding(request, fid):
             create_notification(
                 event="other",
                 title="Copying of %s" % finding.title,
-                description='The finding "%s" was copied by %s to %s'
-                % (finding.title, request.user, test.title),
+                description=f'The finding "{finding.title}" was copied by {request.user} to {test.title}',
                 product=product,
                 url=request.build_absolute_uri(
                     reverse("copy_finding", args=(finding_copy.id,))
@@ -2274,8 +2271,7 @@ def apply_cwe_mitigation(apply_to_findings, template, update=True):
                 template.save()
                 new_note = Notes()
                 new_note.entry = (
-                    "CWE remediation text applied to finding for CWE: %s using template: %s."
-                    % (template.cwe, template.title)
+                    f"CWE remediation text applied to finding for CWE: {template.cwe} using template: {template.title}."
                 )
                 new_note.author, _created = User.objects.get_or_create(
                     username="System"
                 )
@@ -2518,9 +2514,7 @@ def merge_finding_product(request, pid):
                 for finding in findings_to_merge.exclude(
                     pk=finding_to_merge_into.pk
                 ):
-                    notes_entry = "{}\n- {} ({}),".format(
-                        notes_entry, finding.title, finding.id
-                    )
+                    notes_entry = f"{notes_entry}\n- {finding.title} ({finding.id}),"
                     if finding.static_finding:
                         static = finding.static_finding
@@ -2528,23 +2522,17 @@ def merge_finding_product(request, pid):
                         dynamic = finding.dynamic_finding

                     if form.cleaned_data["append_description"]:
-                        finding_descriptions = "{}\n{}".format(
-                            finding_descriptions, finding.description
-                        )
+                        finding_descriptions = f"{finding_descriptions}\n{finding.description}"
                         # Workaround until file path is one to many
                         if finding.file_path:
-                            finding_descriptions = "{}\n**File Path:** {}\n".format(
-                                finding_descriptions, finding.file_path
-                            )
+                            finding_descriptions = f"{finding_descriptions}\n**File Path:** {finding.file_path}\n"

                     # If checked merge the Reference
                     if (
                         form.cleaned_data["append_reference"]
                         and finding.references is not None
                     ):
-                        finding_references = "{}\n{}".format(
-                            finding_references, finding.references
-                        )
+                        finding_references = f"{finding_references}\n{finding.references}"

                     # if checked merge the endpoints
                     if form.cleaned_data["add_endpoints"]:
@@ -2566,9 +2554,7 @@ def merge_finding_product(request, pid):
                     # Add merge finding information to the note if set to inactive
                     if form.cleaned_data["finding_action"] == "inactive":
                         single_finding_notes_entry = ("Finding has been set to inactive "
-                                                      "and merged with the finding: {}.").format(
-                            finding_to_merge_into.title
-                        )
+                                                      f"and merged with the finding: {finding_to_merge_into.title}.")
                         note = Notes(
                             entry=single_finding_notes_entry, author=request.user
                         )
@@ -2581,9 +2567,7 @@ def merge_finding_product(request, pid):

                 # Update the finding to merge into
                 if finding_descriptions != "":
-                    finding_to_merge_into.description = "{}\n\n{}".format(
-                        finding_to_merge_into.description, finding_descriptions
-                    )
+                    finding_to_merge_into.description = f"{finding_to_merge_into.description}\n\n{finding_descriptions}"

                 if finding_to_merge_into.static_finding:
                     static = finding.static_finding
@@ -2592,9 +2576,7 @@ def merge_finding_product(request, pid):
                     dynamic = finding.dynamic_finding

                 if finding_references != "":
-                    finding_to_merge_into.references = "{}\n{}".format(
-                        finding_to_merge_into.references, finding_references
-                    )
+                    finding_to_merge_into.references = f"{finding_to_merge_into.references}\n{finding_references}"

                 finding_to_merge_into.static_finding = static
                 finding_to_merge_into.dynamic_finding = dynamic
@@ -2624,9 +2606,7 @@ def merge_finding_product(request, pid):
                     findings_to_merge.delete()

                 notes_entry = ("Finding consists of merged findings from the following "
-                               "findings which have been {}: {}").format(
-                    finding_action, notes_entry[:-1]
-                )
+                               f"findings which have been {finding_action}: {notes_entry[:-1]}")
                 note = Notes(entry=notes_entry, author=request.user)
                 note.save()
                 finding_to_merge_into.notes.add(note)
@@ -2716,18 +2696,14 @@ def finding_bulk_update_all(request, pid=None):

                 if skipped_find_count > 0:
                     add_error_message_to_response(
-                        "Skipped deletion of {} findings because you are not authorized.".format(
-                            skipped_find_count
-                        )
+                        f"Skipped deletion of {skipped_find_count} findings because you are not authorized."
                     )

                 if deleted_find_count > 0:
                     messages.add_message(
                         request,
                         messages.SUCCESS,
-                        "Bulk delete of {} findings was successful.".format(
-                            deleted_find_count
-                        ),
+                        f"Bulk delete of {deleted_find_count} findings was successful.",
                         extra_tags="alert-success",
                     )
         else:
@@ -2748,9 +2724,7 @@ def finding_bulk_update_all(request, pid=None):

                 if skipped_find_count > 0:
                     add_error_message_to_response(
-                        "Skipped update of {} findings because you are not authorized.".format(
-                            skipped_find_count
-                        )
+                        f"Skipped update of {skipped_find_count} findings because you are not authorized."
                     )

                 finds = prefetch_for_findings(finds)
@@ -2887,8 +2861,7 @@ def finding_bulk_update_all(request, pid=None):

                     if added:
                         add_success_message_to_response(
-                            "Added %s findings to finding group %s"
-                            % (added, finding_group.name)
+                            f"Added {added} findings to finding group {finding_group.name}"
                         )
                         return_url = reverse(
                             "view_finding_group", args=(finding_group.id,)
@@ -2896,9 +2869,8 @@ def finding_bulk_update_all(request, pid=None):

                     if skipped:
                         add_success_message_to_response(
-                            ("Skipped %s findings when adding to finding group %s, "
-                             "findings already part of another group")
-                            % (skipped, finding_group.name)
+                            f"Skipped {skipped} findings when adding to finding group {finding_group.name}, "
+                            "findings already part of another group"
                         )

                     # refresh findings from db
@@ -2914,8 +2886,7 @@ def finding_bulk_update_all(request, pid=None):

                     if removed:
                         add_success_message_to_response(
-                            "Removed %s findings from finding groups %s"
-                            % (
+                            "Removed {} findings from finding groups {}".format(
                                 removed,
                                 ",".join(
                                     [
@@ -2958,9 +2929,8 @@ def finding_bulk_update_all(request, pid=None):

                     if skipped:
                         add_success_message_to_response(
-                            ("Skipped %s findings when grouping by %s as these findings "
-                             "were already in an existing group")
-                            % (skipped, finding_group_by_option)
+                            f"Skipped {skipped} findings when grouping by {finding_group_by_option} as these findings "
+                            "were already in an existing group"
                         )

                     # refresh findings from db
@@ -3104,9 +3074,7 @@ def finding_bulk_update_all(request, pid=None):
                 messages.add_message(
                     request,
                     messages.SUCCESS,
-                    "Bulk update of {} findings was successful.".format(
-                        updated_find_count
-                    ),
+                    f"Bulk update of {updated_find_count} findings was successful.",
                     extra_tags="alert-success",
                 )
         else:
diff --git a/dojo/finding_group/views.py b/dojo/finding_group/views.py
index e6f92a71d27..054fb29bfe8 100644
--- a/dojo/finding_group/views.py
+++ b/dojo/finding_group/views.py
@@ -122,7 +122,7 @@ def delete_finding_group(request, fgid):
     create_notification(event='other',
                         title='Deletion of %s' % finding_group.name,
                         product=product,
-                        description='The finding group "%s" was deleted by %s' % (finding_group.name, request.user),
+                        description=f'The finding group "{finding_group.name}" was deleted by {request.user}',
                         url=request.build_absolute_uri(reverse('view_test', args=(finding_group.test.id,))),
                         icon="exclamation-triangle")
     return HttpResponseRedirect(reverse('view_test', args=(finding_group.test.id,)))
diff --git a/dojo/forms.py b/dojo/forms.py
index 1d24ed429ab..8bce183a95c 100755
--- a/dojo/forms.py
+++ b/dojo/forms.py
@@ -75,7 +75,7 @@ class MultipleSelectWithPop(forms.SelectMultiple):
     def render(self, name, *args, **kwargs):
-        html = super(MultipleSelectWithPop, self).render(name, *args, **kwargs)
+        html = super().render(name, *args, **kwargs)
         popup_plus = ' '
         return mark_safe(popup_plus)
@@ -154,7 +154,7 @@ def value_from_datadict(self, data, files, name):
             if y == m == "0":
                 return None
             if y and m:
-                return '%s-%s-%s' % (y, m, 1)
+                return f'{y}-{m}-{1}'
             return data.get(name, None)
@@ -179,7 +179,7 @@ class Meta:

 class Edit_Product_Type_MemberForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
-        super(Edit_Product_Type_MemberForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['product_type'].disabled = True
         self.fields['user'].queryset = Dojo_User.objects.order_by('first_name', 'last_name')
         self.fields['user'].disabled = True
@@ -193,7 +193,7 @@ class Add_Product_Type_MemberForm(forms.ModelForm):
     users = forms.ModelMultipleChoiceField(queryset=Dojo_User.objects.none(), required=True, label='Users')

     def __init__(self, *args, **kwargs):
-        super(Add_Product_Type_MemberForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         current_members = Product_Type_Member.objects.filter(product_type=self.initial["product_type"]).values_list('user', flat=True)
         self.fields['users'].queryset = Dojo_User.objects.exclude(
             Q(is_superuser=True)
@@ -209,7 +209,7 @@ class Add_Product_Type_Member_UserForm(forms.ModelForm):
     product_types = forms.ModelMultipleChoiceField(queryset=Product_Type.objects.none(), required=True, label='Product Types')

     def __init__(self, *args, **kwargs):
-        super(Add_Product_Type_Member_UserForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         current_members = Product_Type_Member.objects.filter(user=self.initial['user']).values_list('product_type', flat=True)
         self.fields['product_types'].queryset = get_authorized_product_types(Permissions.Product_Type_Member_Add_Owner) \
             .exclude(id__in=current_members)
@@ -222,7 +222,7 @@ class Meta:

 class Delete_Product_Type_MemberForm(Edit_Product_Type_MemberForm):
     def __init__(self, *args, **kwargs):
-        super(Delete_Product_Type_MemberForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['role'].disabled = True
@@ -263,7 +263,7 @@ class ProductForm(forms.ModelForm):
     team_manager = forms.ModelChoiceField(queryset=Dojo_User.objects.exclude(is_active=False).order_by('first_name', 'last_name'), required=False)

     def __init__(self, *args, **kwargs):
-        super(ProductForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['prod_type'].queryset = get_authorized_product_types(Permissions.Product_Type_Add_Product)

         # if this product has findings being asynchronously updated, disable the sla config field
@@ -294,7 +294,7 @@ class EditFindingGroupForm(forms.ModelForm):
                                      help_text='Leave empty and check push to jira to create a new JIRA issue for this finding group.')

     def __init__(self, *args, **kwargs):
-        super(EditFindingGroupForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         import dojo.jira_link.helper as jira_helper

         self.fields['push_to_jira'] = forms.BooleanField()
@@ -325,7 +325,7 @@ class Meta:

 class Edit_Product_MemberForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
-        super(Edit_Product_MemberForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['product'].disabled = True
         self.fields['user'].queryset = Dojo_User.objects.order_by('first_name', 'last_name')
         self.fields['user'].disabled = True
@@ -339,7 +339,7 @@ class Add_Product_MemberForm(forms.ModelForm):
     users = forms.ModelMultipleChoiceField(queryset=Dojo_User.objects.none(), required=True, label='Users')

     def __init__(self, *args, **kwargs):
-        super(Add_Product_MemberForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['product'].disabled = True
         current_members = Product_Member.objects.filter(product=self.initial["product"]).values_list('user', flat=True)
         self.fields['users'].queryset = Dojo_User.objects.exclude(
@@ -355,7 +355,7 @@ class Add_Product_Member_UserForm(forms.ModelForm):
     products = forms.ModelMultipleChoiceField(queryset=Product.objects.none(), required=True, label='Products')

     def __init__(self, *args, **kwargs):
-        super(Add_Product_Member_UserForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         current_members = Product_Member.objects.filter(user=self.initial["user"]).values_list('product', flat=True)
         self.fields['products'].queryset = get_authorized_products(Permissions.Product_Member_Add_Owner) \
             .exclude(id__in=current_members)
@@ -368,7 +368,7 @@ class Meta:

 class Delete_Product_MemberForm(Edit_Product_MemberForm):
     def __init__(self, *args, **kwargs):
-        super(Delete_Product_MemberForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['role'].disabled = True
@@ -385,14 +385,14 @@ class EditNoteTypeForm(NoteTypeForm):

     def __init__(self, *args, **kwargs):
         is_single = kwargs.pop('is_single')
-        super(EditNoteTypeForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if is_single is False:
             self.fields['is_single'].widget = forms.HiddenInput()


 class DisableOrEnableNoteTypeForm(NoteTypeForm):
     def __init__(self, *args, **kwargs):
-        super(DisableOrEnableNoteTypeForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['name'].disabled = True
         self.fields['description'].disabled = True
         self.fields['is_single'].disabled = True
@@ -409,7 +409,7 @@ class DojoMetaDataForm(forms.ModelForm):
                             required=True)

     def full_clean(self):
-        super(DojoMetaDataForm, self).full_clean()
+        super().full_clean()
         try:
             self.instance.validate_unique()
         except ValidationError:
@@ -498,7 +498,7 @@ class ImportScanForm(forms.Form):
     create_finding_groups_for_all_findings = forms.BooleanField(help_text="If unchecked, finding groups will only be created when there is more than one grouped finding", required=False, initial=True)

     def __init__(self, *args, **kwargs):
-        super(ImportScanForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['active'].initial = self.active_verified_choices[0]
         self.fields['verified'].initial = self.active_verified_choices[0]
@@ -515,7 +515,7 @@ def clean(self):
         scan_type = cleaned_data.get("scan_type")
         file = cleaned_data.get("file")
         if requires_file(scan_type) and not file:
-            raise forms.ValidationError('Uploading a Report File is required for {}'.format(scan_type))
+            raise forms.ValidationError(f'Uploading a Report File is required for {scan_type}')
         tool_type = requires_tool_type(scan_type)
         if tool_type:
             api_scan_configuration = cleaned_data.get('api_scan_configuration')
@@ -596,7 +596,7 @@ class ReImportScanForm(forms.Form):
     create_finding_groups_for_all_findings = forms.BooleanField(help_text="If unchecked, finding groups will only be created when there is more than one grouped finding", required=False, initial=True)

     def __init__(self, *args, test=None, **kwargs):
-        super(ReImportScanForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['active'].initial = self.active_verified_choices[0]
         self.fields['verified'].initial = self.active_verified_choices[0]
         self.scan_type = None
@@ -653,7 +653,7 @@ class ImportEndpointMetaForm(forms.Form):
         help_text="Add data from file as Metadata. Metadata is used for displaying custom fields",)

     def __init__(self, *args, **kwargs):
-        super(ImportEndpointMetaForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)


 class DoneForm(forms.Form):
@@ -696,7 +696,7 @@ class MergeFindings(forms.ModelForm):
     def __init__(self, *args, **kwargs):
         _ = kwargs.pop('finding')
         findings = kwargs.pop('findings')
-        super(MergeFindings, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         self.fields['finding_to_merge_into'] = forms.ModelChoiceField(
             queryset=findings, initial=0, required="False", label="Finding to Merge Into", help_text="Findings selected below will be merged into this finding.")
@@ -817,7 +817,7 @@ class CheckForm(forms.ModelForm):

     def __init__(self, *args, **kwargs):
         findings = kwargs.pop('findings')
-        super(CheckForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['session_issues'].queryset = findings
         self.fields['crypto_issues'].queryset = findings
         self.fields['config_issues'].queryset = findings
@@ -868,7 +868,7 @@ def __init__(self, *args, **kwargs):
         if 'user' in kwargs:
             self.user = kwargs.pop('user')

-        super(EngForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if product:
             self.fields['preset'] = forms.ModelChoiceField(help_text="Settings and notes for performing this engagement.",
                                                            required=False, queryset=Engagement_Presets.objects.filter(product=product))
@@ -892,7 +892,7 @@ def __init__(self, *args, **kwargs):
             del self.fields['status']

     def is_valid(self):
-        valid = super(EngForm, self).is_valid()
+        valid = super().is_valid()

         # we're done now if not valid
         if not valid:
@@ -943,7 +943,7 @@ def __init__(self, *args, **kwargs):
         if 'instance' in kwargs:
             obj = kwargs.get('instance')

-        super(TestForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         if obj:
             product = get_product(obj)
@@ -976,7 +976,7 @@ class CopyTestForm(forms.Form):

     def __init__(self, *args, **kwargs):
         authorized_lists = kwargs.pop('engagements', None)
-        super(CopyTestForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['engagement'].queryset = authorized_lists
@@ -1024,7 +1024,7 @@ def __init__(self, *args, **kwargs):
         if 'product' in kwargs:
             product = kwargs.pop('product')

-        super(AddFindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         if product:
             self.fields['endpoints'].queryset = Endpoint.objects.filter(product=product)
@@ -1036,7 +1036,7 @@ def __init__(self, *args, **kwargs):
         self.endpoints_to_add_list = []

     def clean(self):
-        cleaned_data = super(AddFindingForm, self).clean()
+        cleaned_data = super().clean()
         if ((cleaned_data['active'] or cleaned_data['verified']) and cleaned_data['duplicate']):
             raise forms.ValidationError('Duplicate findings cannot be'
                                         ' verified or active')
@@ -1105,7 +1105,7 @@ def __init__(self, *args, **kwargs):
         if 'product' in kwargs:
             product = kwargs.pop('product')

-        super(AdHocFindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         if product:
             self.fields['endpoints'].queryset = Endpoint.objects.filter(product=product)
@@ -1117,7 +1117,7 @@ def __init__(self, *args, **kwargs):
         self.endpoints_to_add_list = []

     def clean(self):
-        cleaned_data = super(AdHocFindingForm, self).clean()
+        cleaned_data = super().clean()
         if ((cleaned_data['active'] or cleaned_data['verified']) and cleaned_data['duplicate']):
             raise forms.ValidationError('Duplicate findings cannot be'
                                         ' verified or active')
@@ -1173,7 +1173,7 @@ def __init__(self, *args, **kwargs):
         if 'product' in kwargs:
             product = kwargs.pop('product')
-        super(PromoteFindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         if product:
             self.fields['endpoints'].queryset = Endpoint.objects.filter(product=product)
@@ -1181,7 +1181,7 @@ def __init__(self, *args, **kwargs):
         self.endpoints_to_add_list = []

     def clean(self):
-        cleaned_data = super(PromoteFindingForm, self).clean()
+        cleaned_data = super().clean()

         endpoints_to_add_list, errors = validate_endpoints_to_add(cleaned_data['endpoints_to_add'])
         if errors:
@@ -1249,7 +1249,7 @@ def __init__(self, *args, **kwargs):
         self.can_edit_mitigated_data = kwargs.pop('can_edit_mitigated_data') if 'can_edit_mitigated_data' in kwargs \
             else False

-        super(FindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         self.fields['endpoints'].queryset = Endpoint.objects.filter(product=self.instance.test.engagement.product)
         self.fields['mitigated_by'].queryset = get_authorized_users(Permissions.Test_Edit)
@@ -1295,7 +1295,7 @@ def __init__(self, *args, **kwargs):
         self.endpoints_to_add_list = []

     def clean(self):
-        cleaned_data = super(FindingForm, self).clean()
+        cleaned_data = super().clean()

         if (cleaned_data['active'] or cleaned_data['verified']) and cleaned_data['duplicate']:
             raise forms.ValidationError('Duplicate findings cannot be'
@@ -1316,7 +1316,7 @@ def clean(self):
         return cleaned_data

     def _post_clean(self):
-        super(FindingForm, self)._post_clean()
+        super()._post_clean()

         if self.can_edit_mitigated_data:
             opts = self.instance._meta
@@ -1342,7 +1342,7 @@ class Meta:
               'date', 'description', 'severity', 'reporter', 'test', 'is_mitigated')

     def clean(self):
-        cleaned_data = super(StubFindingForm, self).clean()
+        cleaned_data = super().clean()
         if 'title' in cleaned_data:
             if len(cleaned_data['title']) <= 0:
                 raise forms.ValidationError("The title is required.")
@@ -1370,14 +1370,14 @@ class ApplyFindingTemplateForm(forms.Form):
     tags = TagField(required=False, help_text="Add tags that help describe this finding template. Choose from the list or add new tags. Press Enter key to add.", initial=Finding.tags.tag_model.objects.all().order_by('name'))

     def __init__(self, template=None, *args, **kwargs):
-        super(ApplyFindingTemplateForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['tags'].autocomplete_tags = Finding.tags.tag_model.objects.all().order_by('name')
         self.template = template
         if template:
             self.template.vulnerability_ids = '\n'.join(template.vulnerability_ids)

     def clean(self):
-        cleaned_data = super(ApplyFindingTemplateForm, self).clean()
+        cleaned_data = super().clean()

         if 'title' in cleaned_data:
             if len(cleaned_data['title']) <= 0:
@@ -1409,7 +1409,7 @@ class FindingTemplateForm(forms.ModelForm):
     field_order = ['title', 'cwe', 'vulnerability_ids', 'severity', 'cvssv3', 'description', 'mitigation', 'impact', 'references', 'tags', 'template_match', 'template_match_cwe', 'template_match_title', 'apply_to_findings']

     def __init__(self, *args, **kwargs):
-        super(FindingTemplateForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['tags'].autocomplete_tags = Finding.tags.tag_model.objects.all().order_by('name')

     class Meta:
@@ -1452,13 +1452,13 @@ class FindingBulkUpdateForm(forms.ModelForm):
     notes = forms.CharField(required=False, max_length=1024, widget=forms.TextInput(attrs={'class': 'form-control'}))

     def __init__(self, *args, **kwargs):
-        super(FindingBulkUpdateForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['severity'].required = False
         # we need to defer initialization to prevent multiple initializations if other forms are shown
         self.fields['tags'].widget.tag_options = tagulous.models.options.TagOptions(autocomplete_settings={'width': '200px', 'defer': True})

     def clean(self):
-        cleaned_data = super(FindingBulkUpdateForm, self).clean()
+        cleaned_data = super().clean()

         if (cleaned_data['active'] or cleaned_data['verified']) and cleaned_data['duplicate']:
             raise forms.ValidationError('Duplicate findings cannot be'
@@ -1482,7 +1482,7 @@ class Meta:
     def __init__(self, *args, **kwargs):
         self.product = None
         self.endpoint_instance = None
-        super(EditEndpointForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if 'instance' in kwargs:
             self.endpoint_instance = kwargs.pop('instance')
             self.product = self.endpoint_instance.product
@@ -1492,7 +1492,7 @@ def __init__(self, *args, **kwargs):

     def clean(self):

-        cleaned_data = super(EditEndpointForm, self).clean()
+        cleaned_data = super().clean()

         protocol = cleaned_data['protocol']
         userinfo = cleaned_data['userinfo']
@@ -1536,7 +1536,7 @@ def __init__(self, *args, **kwargs):
         product = None
         if 'product' in kwargs:
             product = kwargs.pop('product')
-        super(AddEndpointForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['product'] = forms.ModelChoiceField(queryset=get_authorized_products(Permissions.Endpoint_Add))
         if product is not None:
             self.fields['product'].initial = product.id
@@ -1562,7 +1562,7 @@ def save(self):

     def clean(self):

-        cleaned_data = super(AddEndpointForm, self).clean()
+        cleaned_data = super().clean()

         if 'endpoint' in cleaned_data and 'product' in cleaned_data:
             endpoint = cleaned_data['endpoint']
@@ -1606,10 +1606,10 @@ class TypedNoteForm(NoteForm):

     def __init__(self, *args, **kwargs):
         queryset = kwargs.pop('available_note_types')
-        super(TypedNoteForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['note_type'] = forms.ModelChoiceField(queryset=queryset, label='Note Type', required=True)

-    class Meta():
+    class Meta:
         model = Notes
         fields = ['note_type', 'entry', 'private']
@@ -1639,7 +1639,7 @@ class CloseFindingForm(forms.ModelForm):

     def __init__(self, *args, **kwargs):
         queryset = kwargs.pop('missing_note_types')
-        super(CloseFindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if len(queryset) == 0:
             self.fields['note_type'].widget = forms.HiddenInput()
         else:
@@ -1654,7 +1654,7 @@ def __init__(self, *args, **kwargs):
             self.fields['mitigated_by'].initial = self.instance.mitigated_by

     def _post_clean(self):
-        super(CloseFindingForm, self)._post_clean()
+        super()._post_clean()

         if self.can_edit_mitigated_data:
             opts = self.instance._meta
@@ -1676,7 +1676,7 @@ def __init__(self, *args, **kwargs):
         if 'finding' in kwargs:
             finding = kwargs.pop('finding')

-        super(EditPlannedRemediationDateFindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         self.fields['planned_remediation_date'].required = True
         self.fields['planned_remediation_date'].widget = forms.DateInput(attrs={'class': 'datepicker'})
@@ -1742,7 +1742,7 @@ class ReviewFindingForm(forms.Form):
     def __init__(self, *args, **kwargs):
         finding = kwargs.pop("finding", None)
         user = kwargs.pop("user", None)
-        super(ReviewFindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         # Get the list of users
         if finding is not None:
             users = get_authorized_users_for_product_and_product_type(None, finding.test.engagement.product, Permissions.Finding_Edit)
@@ -1776,7 +1776,7 @@ class WeeklyMetricsForm(forms.Form):
     dates = forms.ChoiceField()

     def __init__(self, *args, **kwargs):
-        super(WeeklyMetricsForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         wmf_options = []

         for i in range(6):
@@ -1842,7 +1842,7 @@ def __init__(self, *args, **kwargs):
         exclude_product_types = kwargs.get('exclude_product_types', False)
         if 'exclude_product_types' in kwargs:
             del kwargs['exclude_product_types']
-        super(MetricsFilterForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if exclude_product_types:
             del self.fields['exclude_product_types']
@@ -1871,7 +1871,7 @@ class Add_Group_MemberForm(forms.ModelForm):
     users = forms.ModelMultipleChoiceField(queryset=Dojo_Group_Member.objects.none(), required=True, label='Users')

     def __init__(self, *args, **kwargs):
-        super(Add_Group_MemberForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['group'].disabled = True
         current_members = Dojo_Group_Member.objects.filter(group=self.initial['group']).values_list('user', flat=True)
         self.fields['users'].queryset = Dojo_User.objects.exclude(
@@ -1888,7 +1888,7 @@ class Add_Group_Member_UserForm(forms.ModelForm):
     groups = forms.ModelMultipleChoiceField(queryset=Dojo_Group.objects.none(), required=True, label='Groups')

     def __init__(self, *args, **kwargs):
-        super(Add_Group_Member_UserForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['user'].disabled = True
         current_groups = Dojo_Group_Member.objects.filter(user=self.initial['user']).values_list('group', flat=True)
         self.fields['groups'].queryset = Dojo_Group.objects.exclude(id__in=current_groups)
@@ -1901,7 +1901,7 @@ class Meta:

 class Edit_Group_MemberForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
-        super(Edit_Group_MemberForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['group'].disabled = True
         self.fields['user'].disabled = True
         self.fields['role'].queryset = get_group_member_roles()
@@ -1913,7 +1913,7 @@ class Meta:

 class Delete_Group_MemberForm(Edit_Group_MemberForm):
     def __init__(self, *args, **kwargs):
-        super(Delete_Group_MemberForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['role'].disabled = True
@@ -1921,7 +1921,7 @@ class Add_Product_GroupForm(forms.ModelForm):
     groups = forms.ModelMultipleChoiceField(queryset=Dojo_Group.objects.none(), required=True, label='Groups')

     def __init__(self, *args, **kwargs):
-        super(Add_Product_GroupForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['product'].disabled = True
         current_groups = Product_Group.objects.filter(product=self.initial["product"]).values_list('group', flat=True)
         authorized_groups = get_authorized_groups(Permissions.Group_View)
@@ -1937,7 +1937,7 @@ class Add_Product_Group_GroupForm(forms.ModelForm):
     products = forms.ModelMultipleChoiceField(queryset=Product.objects.none(), required=True, label='Products')

     def __init__(self, *args, **kwargs):
-        super(Add_Product_Group_GroupForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         current_members = Product_Group.objects.filter(group=self.initial["group"]).values_list('product', flat=True)
         self.fields['products'].queryset = get_authorized_products(Permissions.Product_Member_Add_Owner) \
             .exclude(id__in=current_members)
@@ -1951,7 +1951,7 @@ class Meta:

 class Edit_Product_Group_Form(forms.ModelForm):
     def __init__(self, *args, **kwargs):
-        super(Edit_Product_Group_Form, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['product'].disabled = True
         self.fields['group'].disabled = True
@@ -1962,7 +1962,7 @@ class Meta:

 class Delete_Product_GroupForm(Edit_Product_Group_Form):
     def __init__(self, *args, **kwargs):
-        super(Delete_Product_GroupForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['role'].disabled = True
@@ -1970,7 +1970,7 @@ class Add_Product_Type_GroupForm(forms.ModelForm):
     groups = forms.ModelMultipleChoiceField(queryset=Dojo_Group.objects.none(), required=True, label='Groups')

     def __init__(self, *args, **kwargs):
-        super(Add_Product_Type_GroupForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         current_groups = Product_Type_Group.objects.filter(product_type=self.initial["product_type"]).values_list('group', flat=True)
         authorized_groups = get_authorized_groups(Permissions.Group_View)
         authorized_groups = authorized_groups.exclude(id__in=current_groups)
@@ -1986,7 +1986,7 @@ class Add_Product_Type_Group_GroupForm(forms.ModelForm):
     product_types = forms.ModelMultipleChoiceField(queryset=Product_Type.objects.none(), required=True, label='Product Types')

     def __init__(self, *args, **kwargs):
-        super(Add_Product_Type_Group_GroupForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         current_members = Product_Type_Group.objects.filter(group=self.initial['group']).values_list('product_type', flat=True)
         self.fields['product_types'].queryset = get_authorized_product_types(Permissions.Product_Type_Member_Add_Owner) \
             .exclude(id__in=current_members)
@@ -2000,7 +2000,7 @@ class Meta:

 class Edit_Product_Type_Group_Form(forms.ModelForm):
     def __init__(self, *args, **kwargs):
-        super(Edit_Product_Type_Group_Form, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['product_type'].disabled = True
         self.fields['group'].disabled = True
@@ -2011,13 +2011,13 @@ class Meta:

 class Delete_Product_Type_GroupForm(Edit_Product_Type_Group_Form):
     def __init__(self, *args, **kwargs):
-        super(Delete_Product_Type_GroupForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['role'].disabled = True


 class DojoUserForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
-        super(DojoUserForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if not get_current_user().is_superuser and not get_system_setting('enable_user_profile_editable'):
             for field in self.fields:
                 self.fields[field].disabled = True
@@ -2045,7 +2045,7 @@ def __init__(self, *args, **kwargs):
         self.user = None
         if 'user' in kwargs:
             self.user = kwargs.pop('user')
-        super(ChangePasswordForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['new_password'].help_text = get_password_requirements_string()

     def clean(self):
@@ -2214,7 +2214,7 @@ class CopyFindingForm(forms.Form):

     def __init__(self, *args, **kwargs):
         authorized_lists = kwargs.pop('tests', None)
-        super(CopyFindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['test'].queryset = authorized_lists
@@ -2329,7 +2329,7 @@ class JIRAForm(BaseJiraForm):
         help_text='Choose the folder containing the Django templates used to render the JIRA issue description. These are stored in dojo/templates/issue-trackers. Leave empty to use the default jira_full templates.')

     def __init__(self, *args, **kwargs):
-        super(JIRAForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if self.instance:
             self.fields['password'].required = False
@@ -2380,7 +2380,7 @@ class Meta:

 class Product_API_Scan_ConfigurationForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
-        super(Product_API_Scan_ConfigurationForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     tool_configuration = forms.ModelChoiceField(
         label='Tool Configuration',
@@ -2453,7 +2453,7 @@ class Meta:
         exclude = ['product', 'tags']

     def __init__(self, *args, **kwargs):
-        super(DeleteAppAnalysisForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['name'].disabled = True
         self.fields['user'].disabled = True
         self.fields['confidence'].disabled = True
@@ -2489,7 +2489,7 @@ def clean(self):

 class SLAConfigForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
-        super(SLAConfigForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         # if this sla config has findings being asynchronously updated, disable the days by severity fields
         if self.instance.async_updating:
@@ -2574,7 +2574,7 @@ class Meta:
         exclude = ['product']

     def __init__(self, *args, **kwargs):
-        super(ObjectSettingsForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def clean(self):
         form_data = self.cleaned_data
@@ -2625,7 +2625,7 @@ class SystemSettingsForm(forms.ModelForm):
     jira_webhook_secret = forms.CharField(required=False)

     def __init__(self, *args, **kwargs):
-        super(SystemSettingsForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['default_group_role'].queryset = get_group_member_roles()

     def clean(self):
@@ -2667,7 +2667,7 @@ class Meta:

 class ProductNotificationsForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
-        super(ProductNotificationsForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if not self.instance.id:
             self.initial['engagement_added'] = ''
             self.initial['close_engagement'] = ''
@@ -2837,7 +2837,7 @@ def clean(self):
 class GITHUBFindingForm(forms.Form):
     def __init__(self, *args, **kwargs):
         self.enabled = kwargs.pop('enabled')
-        super(GITHUBFindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['push_to_github'] = forms.BooleanField()
         self.fields['push_to_github'].required = False
         self.fields['push_to_github'].help_text = "Checking this will overwrite content of your Github issue, or create one."
@@ -2857,7 +2857,7 @@ def __init__(self, *args, **kwargs):
         if self.instance is None and self.jira_project is None:
             raise ValueError('either and finding instance or jira_project is needed')

-        super(JIRAFindingForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['push_to_jira'] = forms.BooleanField()
         self.fields['push_to_jira'].required = False
         if is_finding_groups_enabled():
@@ -2892,7 +2892,7 @@ def __init__(self, *args, **kwargs):

     def clean(self):
         logger.debug('jform clean')
-        super(JIRAFindingForm, self).clean()
+        super().clean()
         jira_issue_key_new = self.cleaned_data.get('jira_issue')
         finding = self.instance
         jira_project = self.jira_project
@@ -2968,7 +2968,7 @@ class JIRAImportScanForm(forms.Form):

     def __init__(self, *args, **kwargs):
         self.push_all = kwargs.pop('push_all', False)
-        super(JIRAImportScanForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if self.push_all:
             # This will show the checkbox as checked and greyed out, this way the user is aware
             # that issues will be pushed to JIRA, given their product-level settings.
@@ -2987,7 +2987,7 @@ class JIRAEngagementForm(forms.Form):

     def __init__(self, *args, **kwargs):
         self.instance = kwargs.pop('instance', None)

-        super(JIRAEngagementForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         if self.instance:
             if self.instance.has_jira_issue:
@@ -3026,7 +3026,7 @@ class Meta:

 class AnnouncementRemoveForm(AnnouncementCreateForm):
     def __init__(self, *args, **kwargs):
-        super(AnnouncementRemoveForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.fields['dismissable'].disabled = True
         self.fields['message'].disabled = True
         self.fields['style'].disabled = True
@@ -3069,12 +3069,12 @@ def __init__(self, *args, **kwargs):
             raise ValueError('Need a question to render')
         del kwargs['question']

-        super(QuestionForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)


 class TextQuestionForm(QuestionForm):
     def __init__(self, *args, **kwargs):
-        super(TextQuestionForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         # work out initial data
@@ -3119,7 +3119,7 @@ def save(self):

 class ChoiceQuestionForm(QuestionForm):
     def __init__(self, *args, **kwargs):
-        super(ChoiceQuestionForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         choices = [(c.id, c.label) for c in self.question.choices.all()]
@@ -3326,7 +3326,7 @@ def __init__(self, attrs=None):
                    forms.TextInput(attrs={'data-type': 'choice'}),
                    forms.TextInput(attrs={'data-type': 'choice'}),
                    forms.TextInput(attrs={'data-type': 'choice'})]
-        super(MultiWidgetBasic, self).__init__(widgets, attrs)
+        super().__init__(widgets, attrs)

     def decompress(self, value):
         if value:
@@ -3348,7 +3348,7 @@ def __init__(self, *args, **kwargs):
                        forms.fields.CharField(required=False),
                        forms.fields.CharField(required=False),
                        forms.fields.CharField(required=False)]
-        super(MultiExampleField, self).__init__(list_fields, *args, **kwargs)
+        super().__init__(list_fields, *args, **kwargs)

     def compress(self, values):
         return pickle.dumps(values)
@@ -3405,7 +3405,7 @@ def __init__(self, *args, **kwargs):
         assignee = None
         if 'assignee' in kwargs:
            assignee = kwargs.pop('asignees')
-        super(AssignUserForm, 
self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if assignee is None: self.fields['assignee'] = forms.ModelChoiceField(queryset=get_authorized_users(Permissions.Engagement_View), empty_label='Not Assigned', required=False) else: @@ -3424,7 +3424,7 @@ class AddEngagementForm(forms.Form): help_text='Select which product to attach Engagement') def __init__(self, *args, **kwargs): - super(AddEngagementForm, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.fields['product'].queryset = get_authorized_products(Permissions.Engagement_Add) @@ -3433,7 +3433,7 @@ class ConfigurationPermissionsForm(forms.Form): def __init__(self, *args, **kwargs): self.user = kwargs.pop('user', None) self.group = kwargs.pop('group', None) - super(ConfigurationPermissionsForm, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.permission_fields = get_configuration_permissions_fields() diff --git a/dojo/group/views.py b/dojo/group/views.py index 8cdf17b31c3..d454a8d977d 100644 --- a/dojo/group/views.py +++ b/dojo/group/views.py @@ -419,7 +419,7 @@ def edit_group_member(request, mid): if owners < 1: messages.add_message(request, messages.WARNING, - 'There must be at least one owner for group {}.'.format(member.group.name), + f'There must be at least one owner for group {member.group.name}.', extra_tags='alert-warning') if is_title_in_breadcrumbs('View User'): return HttpResponseRedirect(reverse('view_user', args=(member.user.id, ))) @@ -461,7 +461,7 @@ def delete_group_member(request, mid): if owners <= 1: messages.add_message(request, messages.WARNING, - 'There must be at least one owner for group {}.'.format(member.group.name), + f'There must be at least one owner for group {member.group.name}.', extra_tags='alert-warning') if is_title_in_breadcrumbs('View User'): return HttpResponseRedirect(reverse('view_user', args=(member.user.id, ))) diff --git a/dojo/importers/importer/importer.py b/dojo/importers/importer/importer.py index 337df8d0470..1be23a01e85 100644 --- a/dojo/importers/importer/importer.py +++ b/dojo/importers/importer/importer.py @@ -23,7 +23,7 @@ deduplicationLogger = logging.getLogger("dojo.specific-loggers.deduplication") -class DojoDefaultImporter(object): +class DojoDefaultImporter: def create_test(self, scan_type, test_type_name, engagement, lead, environment, tags=None, scan_date=None, version=None, branch_tag=None, build_id=None, commit_hash=None, now=timezone.now(), diff --git a/dojo/importers/reimporter/reimporter.py b/dojo/importers/reimporter/reimporter.py index 1515b6ba410..0128d0daaa9 100644 --- a/dojo/importers/reimporter/reimporter.py +++ b/dojo/importers/reimporter/reimporter.py @@ -22,7 +22,7 @@ deduplicationLogger = logging.getLogger("dojo.specific-loggers.deduplication") -class DojoDefaultReImporter(object): +class DojoDefaultReImporter: @dojo_async_task @app.task(ignore_result=False) def process_parsed_findings( @@ -106,7 +106,7 @@ def process_parsed_findings( except ValidationError as err: logger.warning( "DefectDojo is storing broken endpoint because cleaning wasn't successful: " - "{}".format(err) + f"{err}" ) item.hash_code = item.compute_hash_code() diff --git a/dojo/importers/utils.py b/dojo/importers/utils.py index 0b98caf551d..5788b5fcf45 100644 --- a/dojo/importers/utils.py +++ b/dojo/importers/utils.py @@ -142,7 +142,7 @@ def add_endpoints_to_unsaved_finding(finding, test, endpoints, **kwargs): endpoint.clean() except ValidationError as e: logger.warning("DefectDojo is storing broken endpoint because cleaning 
wasn't successful: " - "{}".format(e)) + f"{e}") ep = None try: ep, _created = endpoint_get_or_create( diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 090c75c8972..eb2b1c3adf2 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -722,7 +722,7 @@ def failure_to_add_message(message: str, exception: Exception, object: Any) -> b return False if not is_jira_configured_and_enabled(obj): - message = 'Object %s cannot be pushed to JIRA as there is no JIRA configuration for %s.' % (obj.id, to_str_typed(obj)) + message = f'Object {obj.id} cannot be pushed to JIRA as there is no JIRA configuration for {to_str_typed(obj)}.' return failure_to_add_message(message, None, obj) jira_project = get_jira_project(obj) @@ -884,7 +884,7 @@ def failure_to_update_message(message: str, exception: Exception, obj: Any) -> b jira_instance = get_jira_instance(obj) if not is_jira_configured_and_enabled(obj): - message = 'Object %s cannot be pushed to JIRA as there is no JIRA configuration for %s.' % (obj.id, to_str_typed(obj)) + message = f'Object {obj.id} cannot be pushed to JIRA as there is no JIRA configuration for {to_str_typed(obj)}.' return failure_to_update_message(message, None, obj) j_issue = obj.jira_issue @@ -985,7 +985,7 @@ def get_jira_issue_from_jira(find): j_issue = find.jira_issue if not jira_project: logger.error("Unable to retrieve latest status change from JIRA %s for finding %s as there is no JIRA_Project configured for this finding.", j_issue.jira_key, format(find.id)) - log_jira_alert("Unable to retrieve latest status change from JIRA %s for finding %s as there is no JIRA_Project configured for this finding." % (j_issue.jira_key, find), find) + log_jira_alert(f"Unable to retrieve latest status change from JIRA {j_issue.jira_key} for finding {find} as there is no JIRA_Project configured for this finding.", find) return False meta = None @@ -1203,7 +1203,7 @@ def close_epic(eng, push_to_jira, **kwargs): auth=HTTPBasicAuth(jira_instance.username, jira_instance.password), json=json_data) if r.status_code != 204: - logger.warning("JIRA close epic failed with error: {}".format(r.text)) + logger.warning(f"JIRA close epic failed with error: {r.text}") return False return True except JIRAError as e: @@ -1349,7 +1349,7 @@ def add_comment(obj, note, force_push=False, **kwargs): j_issue = obj.jira_issue jira.add_comment( j_issue.jira_id, - '(%s): %s' % (note.author.get_full_name() if note.author.get_full_name() else note.author.username, note.entry)) + f'({note.author.get_full_name() if note.author.get_full_name() else note.author.username}): {note.entry}') return True except JIRAError as e: log_jira_generic_alert('Jira Add Comment Error', str(e)) @@ -1580,7 +1580,7 @@ def process_resolution_from_jira(finding, resolution_id, resolution_name, assign if resolved: if jira_instance and resolution_name in jira_instance.accepted_resolutions: if not finding.risk_accepted: - logger.debug("Marking related finding of {} as accepted. Creating risk acceptance.".format(jira_issue.jira_key)) + logger.debug(f"Marking related finding of {jira_issue.jira_key} as accepted. 
Creating risk acceptance.") finding.active = False finding.mitigated = None finding.is_mitigated = False @@ -1594,7 +1594,7 @@ def process_resolution_from_jira(finding, resolution_id, resolution_name, assign status_changed = True elif jira_instance and resolution_name in jira_instance.false_positive_resolutions: if not finding.false_p: - logger.debug("Marking related finding of {} as false-positive".format(jira_issue.jira_key)) + logger.debug(f"Marking related finding of {jira_issue.jira_key} as false-positive") finding.active = False finding.verified = False finding.mitigated = None @@ -1605,7 +1605,7 @@ def process_resolution_from_jira(finding, resolution_id, resolution_name, assign else: # Mitigated by default as before if not finding.is_mitigated: - logger.debug("Marking related finding of {} as mitigated (default)".format(jira_issue.jira_key)) + logger.debug(f"Marking related finding of {jira_issue.jira_key} as mitigated (default)") finding.active = False finding.mitigated = jira_now finding.is_mitigated = True @@ -1617,7 +1617,7 @@ def process_resolution_from_jira(finding, resolution_id, resolution_name, assign else: if not finding.active: # Reopen / Open Jira issue - logger.debug("Re-opening related finding of {}".format(jira_issue.jira_key)) + logger.debug(f"Re-opening related finding of {jira_issue.jira_key}") finding.active = True finding.mitigated = None finding.is_mitigated = False diff --git a/dojo/management/commands/dupecheck.py b/dojo/management/commands/dupecheck.py index ab7432fdec3..92567173c94 100644 --- a/dojo/management/commands/dupecheck.py +++ b/dojo/management/commands/dupecheck.py @@ -17,13 +17,13 @@ def count_the_duplicates(self, model, column): print(" Table:" + str(model) + " Column: " + column) print("===================================") duplicates = model.objects.values(column).annotate(Count('id')).order_by().filter(id__count__gt=1) - kwargs = {'{0}__{1}'.format(column, 'in'): [item[column] for item in duplicates]} + kwargs = {'{}__{}'.format(column, 'in'): [item[column] for item in duplicates]} duplicates = model.objects.filter(**kwargs) if not duplicates: print("No duplicates found") for dupe in duplicates: - print('{0}, Duplicate value: {1}, Object: {2}'.format(dupe.id, getattr(dupe, column), dupe)) + print(f'{dupe.id}, Duplicate value: {getattr(dupe, column)}, Object: {dupe}') def handle(self, *args, **options): self.count_the_duplicates(Product, 'name') diff --git a/dojo/management/commands/import_github_languages.py b/dojo/management/commands/import_github_languages.py index b92ff7921f7..bed838e9f6d 100644 --- a/dojo/management/commands/import_github_languages.py +++ b/dojo/management/commands/import_github_languages.py @@ -33,7 +33,7 @@ def handle(self, *args, **options): try: language_type, created = Language_Type.objects.get_or_create(language=name) except Language_Type.MultipleObjectsReturned: - logger.warning('Language_Type {} exists multiple times'.format(name)) + logger.warning(f'Language_Type {name} exists multiple times') continue if created: @@ -42,4 +42,4 @@ def handle(self, *args, **options): language_type.color = element.get('color', 0) language_type.save() - logger.info('Finished importing languages from GitHub, added {} Language_Types'.format(new_language_types)) + logger.info(f'Finished importing languages from GitHub, added {new_language_types} Language_Types') diff --git a/dojo/management/commands/import_surveys.py b/dojo/management/commands/import_surveys.py index 75c28247262..5fc230fbb4c 100644 --- 
a/dojo/management/commands/import_surveys.py +++ b/dojo/management/commands/import_surveys.py @@ -28,7 +28,7 @@ def handle(self, *args, **options): # Find the current id in the surveys file path = os.path.dirname(os.path.abspath(__file__)) path = path[:-19] + 'fixtures/initial_surveys.json' - contents = open(path, "rt").readlines() + contents = open(path).readlines() for line in contents: if '"polymorphic_ctype": ' in line: matchedLine = line @@ -37,7 +37,7 @@ def handle(self, *args, **options): old_id = ''.join(c for c in matchedLine if c.isdigit()) new_line = matchedLine.replace(old_id, str(ctype_id)) # Replace the all lines in the file - with open(path, "wt") as fout: + with open(path, "w") as fout: for line in contents: fout.write(line.replace(matchedLine, new_line)) # Delete the temp question diff --git a/dojo/metrics/views.py b/dojo/metrics/views.py index 03872c33eda..c7bef42c76e 100644 --- a/dojo/metrics/views.py +++ b/dojo/metrics/views.py @@ -1033,7 +1033,7 @@ def view_engineer(request, eid): severity='Low' ).count() prod = Product.objects.get(id=product) - all_findings_link = "<a href='%s'>%s</a>" % ( + all_findings_link = "<a href='{}'>{}</a>".format( reverse('product_open_findings', args=(prod.id,)), escape(prod.name)) update.append([all_findings_link, z_count, o_count, t_count, h_count, z_count + o_count + t_count + h_count]) @@ -1066,7 +1066,7 @@ def view_engineer(request, eid): mitigated__isnull=True, severity='Low').count() prod = Product.objects.get(id=product) - all_findings_link = "<a href='%s'>%s</a>" % ( + all_findings_link = "<a href='{}'>{}</a>".format( reverse('product_open_findings', args=(prod.id,)), escape(prod.name)) total_update.append([all_findings_link, z_count, o_count, t_count, h_count, z_count + o_count + t_count + h_count]) diff --git a/dojo/middleware.py b/dojo/middleware.py index 733c66f4cd4..4e54fe9e813 100644 --- a/dojo/middleware.py +++ b/dojo/middleware.py @@ -63,7 +63,7 @@ def __call__(self, request): return self.get_response(request) -class DojoSytemSettingsMiddleware(object): +class DojoSytemSettingsMiddleware: _thread_local = local() def __init__(self, get_response): @@ -106,7 +106,7 @@ class System_Settings_Manager(models.Manager): def get_from_db(self, *args, **kwargs): # logger.debug('refreshing system_settings from db') try: - from_db = super(System_Settings_Manager, self).get(*args, **kwargs) + from_db = super().get(*args, **kwargs) except: from dojo.models import System_Settings # this mimics the existing code that was in filters.py and utils.py. diff --git a/dojo/models.py b/dojo/models.py index a59af55ee9a..53d9637e01c 100755 --- a/dojo/models.py +++ b/dojo/models.py @@ -142,7 +142,7 @@ def __init__(self, directory=None, keep_basename=False, keep_ext=True): def __call__(self, model_instance, filename): base, ext = os.path.splitext(filename) - filename = "%s_%s" % (base, uuid4()) if self.keep_basename else str(uuid4()) + filename = f"{base}_{uuid4()}" if self.keep_basename else str(uuid4()) if self.keep_ext: filename += ext if self.directory is None: @@ -219,9 +219,7 @@ def generate_full_name(user): """ Returns the first_name plus the last_name, with a space in between. 
""" - full_name = '%s %s (%s)' % (user.first_name, - user.last_name, - user.username) + full_name = f'{user.first_name} {user.last_name} ({user.username})' return full_name.strip() @@ -690,12 +688,12 @@ def copy(self): copy.pk = None copy.id = None # Add unique modifier to file name - copy.title = '{} - clone-{}'.format(self.title, str(uuid4())[:8]) + copy.title = f'{self.title} - clone-{str(uuid4())[:8]}' # Create new unique file name current_url = self.file.url _, current_full_filename = current_url.rsplit('/', 1) _, extension = current_full_filename.split('.', 1) - new_file = ContentFile(self.file.read(), name='{}.{}'.format(uuid4(), extension)) + new_file = ContentFile(self.file.read(), name=f'{uuid4()}.{extension}') copy.file = new_file copy.save() @@ -709,11 +707,7 @@ def get_accessible_url(self, obj, obj_id): elif isinstance(obj, Finding): obj_type = 'Finding' - return 'access_file/{file_id}/{obj_id}/{obj_type}'.format( - file_id=self.id, - obj_id=obj_id, - obj_type=obj_type - ) + return f'access_file/{self.id}/{obj_id}/{obj_type}' class Product_Type(models.Model): @@ -857,7 +851,7 @@ def clean(self): raise ValidationError('Metadata entries may not have more than one relation, either a product, an endpoint either or a finding') def __str__(self): - return "%s: %s" % (self.name, self.value) + return f"{self.name}: {self.value}" class Meta: unique_together = (('product', 'name'), @@ -899,7 +893,7 @@ def save(self, *args, **kwargs): self.medium = initial_sla_config.medium self.low = initial_sla_config.low - super(SLA_Configuration, self).save(*args, **kwargs) + super().save(*args, **kwargs) # if the initial sla config exists and async finding update is not running if initial_sla_config is not None and not self.async_updating: @@ -917,7 +911,7 @@ def save(self, *args, **kwargs): if len(severities): # set the async updating flag to true for this sla config self.async_updating = True - super(SLA_Configuration, self).save(*args, **kwargs) + super().save(*args, **kwargs) # set the async updating flag to true for all products using this sla config products = Product.objects.filter(sla_configuration=self) for product in products: @@ -1060,7 +1054,7 @@ def save(self, *args, **kwargs): if initial_sla_config and self.async_updating: self.sla_configuration = initial_sla_config - super(Product, self).save(*args, **kwargs) + super().save(*args, **kwargs) # if the initial sla config exists and async finding update is not running if initial_sla_config is not None and not self.async_updating: @@ -1070,7 +1064,7 @@ def save(self, *args, **kwargs): if new_sla_config and (initial_sla_config != new_sla_config): # set the async updating flag to true for this product self.async_updating = True - super(Product, self).save(*args, **kwargs) + super().save(*args, **kwargs) # set the async updating flag to true for the sla config assigned to this product sla_config = getattr(self, 'sla_configuration', None) if sla_config: @@ -1552,7 +1546,7 @@ def age(self): return days if days > 0 else 0 def __str__(self): - return "'{}' on '{}'".format(str(self.finding), str(self.endpoint)) + return f"'{str(self.finding)}' on '{str(self.endpoint)}'" def copy(self, finding=None): copy = self @@ -1617,13 +1611,13 @@ def clean(self): db_type = connection.vendor if self.protocol or self.protocol == '': if not re.match(r'^[A-Za-z][A-Za-z0-9\.\-\+]+$', self.protocol): # https://tools.ietf.org/html/rfc3986#section-3.1 - errors.append(ValidationError('Protocol "{}" has invalid format'.format(self.protocol))) + 
errors.append(ValidationError(f'Protocol "{self.protocol}" has invalid format')) if self.protocol == '': self.protocol = None if self.userinfo or self.userinfo == '': if not re.match(r'^[A-Za-z0-9\.\-_~%\!\$&\'\(\)\*\+,;=:]+$', self.userinfo): # https://tools.ietf.org/html/rfc3986#section-3.2.1 - errors.append(ValidationError('Userinfo "{}" has invalid format'.format(self.userinfo))) + errors.append(ValidationError(f'Userinfo "{self.userinfo}" has invalid format')) if self.userinfo == '': self.userinfo = None @@ -1632,7 +1626,7 @@ def clean(self): try: validate_ipv46_address(self.host) except ValidationError: - errors.append(ValidationError('Host "{}" has invalid format'.format(self.host))) + errors.append(ValidationError(f'Host "{self.host}" has invalid format')) else: errors.append(ValidationError('Host must not be empty')) @@ -1640,10 +1634,10 @@ def clean(self): try: int_port = int(self.port) if not (0 <= int_port < 65536): - errors.append(ValidationError('Port "{}" has invalid format - out of range'.format(self.port))) + errors.append(ValidationError(f'Port "{self.port}" has invalid format - out of range')) self.port = int_port except ValueError: - errors.append(ValidationError('Port "{}" has invalid format - it is not a number'.format(self.port))) + errors.append(ValidationError(f'Port "{self.port}" has invalid format - it is not a number')) if self.path or self.path == '': while len(self.path) > 0 and self.path[0] == "/": # Endpoint store "root-less" path @@ -1654,7 +1648,7 @@ def clean(self): action_string = 'Postgres does not accept NULL character. Attempting to replace with %00...' for remove_str in null_char_list: self.path = self.path.replace(remove_str, '%00') - errors.append(ValidationError('Path "{}" has invalid format - It contains the NULL character. The following action was taken: {}'.format(old_value, action_string))) + errors.append(ValidationError(f'Path "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')) if self.path == '': self.path = None @@ -1667,7 +1661,7 @@ def clean(self): action_string = 'Postgres does not accept NULL character. Attempting to replace with %00...' for remove_str in null_char_list: self.query = self.query.replace(remove_str, '%00') - errors.append(ValidationError('Query "{}" has invalid format - It contains the NULL character. The following action was taken: {}'.format(old_value, action_string))) + errors.append(ValidationError(f'Query "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')) if self.query == '': self.query = None @@ -1680,7 +1674,7 @@ def clean(self): action_string = 'Postgres does not accept NULL character. Attempting to replace with %00...' for remove_str in null_char_list: self.fragment = self.fragment.replace(remove_str, '%00') - errors.append(ValidationError('Fragment "{}" has invalid format - It contains the NULL character. The following action was taken: {}'.format(old_value, action_string))) + errors.append(ValidationError(f'Fragment "{old_value}" has invalid format - It contains the NULL character. 
The following action was taken: {action_string}')) if self.fragment == '': self.fragment = None @@ -1699,11 +1693,11 @@ def __str__(self): path=tuple(self.path.split('/')) if self.path else (), query=tuple( ( - qe.split(u"=", 1) - if u"=" in qe + qe.split("=", 1) + if "=" in qe else (qe, None) ) - for qe in self.query.split(u"&") + for qe in self.query.split("&") ) if self.query else (), # inspired by https://github.com/python-hyper/hyperlink/blob/b8c9152cd826bbe8e6cc125648f3738235019705/src/hyperlink/_url.py#L1427 fragment=self.fragment or '' ) @@ -1722,19 +1716,19 @@ def __str__(self): except: url = '' if self.protocol: - url += '{}://'.format(self.protocol) + url += f'{self.protocol}://' if self.userinfo: - url += '{}@'.format(self.userinfo) + url += f'{self.userinfo}@' if self.host: url += self.host if self.port: - url += ':{}'.format(self.port) + url += f':{self.port}' if self.path: url += '{}{}'.format('/' if self.path[0] != '/' else '', self.path) if self.query: - url += '?{}'.format(self.query) + url += f'?{self.query}' if self.fragment: - url += '#{}'.format(self.fragment) + url += f'#{self.fragment}' return url def __hash__(self): @@ -1905,7 +1899,7 @@ def from_uri(uri): from urllib.parse import urlparse url = hyperlink.parse(url="//" + urlparse(uri).netloc) except hyperlink.URLParseError as e: - raise ValidationError('Invalid URL format: {}'.format(e)) + raise ValidationError(f'Invalid URL format: {e}') query_parts = [] # inspired by https://github.com/python-hyper/hyperlink/blob/b8c9152cd826bbe8e6cc125648f3738235019705/src/hyperlink/_url.py#L1768 for k, v in url.query: @@ -1913,7 +1907,7 @@ def from_uri(uri): query_parts.append(k) else: query_parts.append(f"{k}={v}") - query_string = u"&".join(query_parts) + query_string = "&".join(query_parts) protocol = url.scheme if url.scheme != '' else None userinfo = ':'.join(url.userinfo) if url.userinfo not in [(), ('',)] else None @@ -2019,7 +2013,7 @@ def test_type_name(self) -> str: def __str__(self): if self.title: - return "%s (%s)" % (self.title, self.test_type) + return f"{self.title} ({self.test_type})" return str(self.test_type) def get_breadcrumbs(self): @@ -2561,7 +2555,7 @@ class Meta: ] def __init__(self, *args, **kwargs): - super(Finding, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.unsaved_endpoints = [] self.unsaved_request = None @@ -3053,7 +3047,7 @@ def save(self, dedupe_option=True, rules_option=True, product_grading_option=Tru self.set_sla_expiration_date() logger.debug("Saving finding of id " + str(self.id) + " dedupe_option:" + str(dedupe_option) + " (self.pk is %s)", "None" if self.pk is None else "not None") - super(Finding, self).save(*args, **kwargs) + super().save(*args, **kwargs) self.found_by.add(self.test.test_type) @@ -3087,7 +3081,7 @@ def get_breadcrumbs(self): return bc def get_valid_request_response_pairs(self): - empty_value = base64.b64encode("".encode()) + empty_value = base64.b64encode(b"") # Get a list of all req/resp pairs all_req_resps = self.burprawrequestresponse_set.all() # Filter away those that do not have any contents @@ -3711,7 +3705,7 @@ class FileAccessToken(models.Model): def save(self, *args, **kwargs): if not self.token: self.token = uuid4() - return super(FileAccessToken, self).save(*args, **kwargs) + return super().save(*args, **kwargs) ANNOUNCEMENT_STYLE_CHOICES = ( diff --git a/dojo/notifications/helper.py b/dojo/notifications/helper.py index 4cfa65bdda2..1ee9a9be207 100644 --- a/dojo/notifications/helper.py +++ b/dojo/notifications/helper.py @@ -93,7 
+93,7 @@ def create_notification(event=None, **kwargs): queryset=Notifications.objects.filter(Q(product_id=product) | Q(product__isnull=True)), to_attr="applicable_notifications" )).annotate(applicable_notifications_count=Count('notifications__id', filter=Q(notifications__product_id=product) | Q(notifications__product__isnull=True)))\ - .filter((Q(applicable_notifications_count__gt=0) | Q(is_superuser=True))) + .filter(Q(applicable_notifications_count__gt=0) | Q(is_superuser=True)) # only send to authorized users or admin/superusers logger.debug('Filtering users for the product %s', product) @@ -126,11 +126,11 @@ def create_notification(event=None, **kwargs): def create_description(event, *args, **kwargs): if "description" not in kwargs.keys(): if event == 'product_added': - kwargs["description"] = _('Product %(title)s has been created successfully.' % {'title': kwargs['title']}) + kwargs["description"] = _('Product {title} has been created successfully.'.format(title=kwargs['title'])) elif event == 'product_type_added': - kwargs["description"] = _('Product Type %(title)s has been created successfully.' % {'title': kwargs['title']}) + kwargs["description"] = _('Product Type {title} has been created successfully.'.format(title=kwargs['title'])) else: - kwargs["description"] = _('Event %(event)s has occurred.' % {'event': str(event)}) + kwargs["description"] = _('Event {event} has occurred.'.format(event=str(event))) return kwargs["description"] @@ -227,7 +227,7 @@ def _post_slack_message(channel): # only send notification if we managed to find the slack_user_id if slack_user_id: - channel = '@{}'.format(slack_user_id) + channel = f'@{slack_user_id}' _post_slack_message(channel) else: logger.info("The user %s does not have a email address informed for Slack in profile.", user) @@ -235,7 +235,7 @@ def _post_slack_message(channel): # System scope slack notifications, and not personal would still see this go through if get_system_setting('slack_channel') is not None: channel = get_system_setting('slack_channel') - logger.info("Sending system notification to system channel {}.".format(channel)) + logger.info(f"Sending system notification to system channel {channel}.") _post_slack_message(channel) else: logger.debug('slack_channel not configured: skipping system notification') @@ -354,10 +354,10 @@ def get_slack_user_id(user_email): if user_email == user["user"]["profile"]["email"]: if "id" in user["user"]: user_id = user["user"]["id"] - logger.debug("Slack user ID is {}".format(user_id)) + logger.debug(f"Slack user ID is {user_id}") slack_user_is_found = True else: - logger.warning("A user with email {} could not be found in this Slack workspace.".format(user_email)) + logger.warning(f"A user with email {user_email} could not be found in this Slack workspace.") if not slack_user_is_found: logger.warning("The Slack user was not found.") diff --git a/dojo/okta.py b/dojo/okta.py index 27ea21084db..856f6004e8d 100644 --- a/dojo/okta.py +++ b/dojo/okta.py @@ -12,7 +12,7 @@ from social_core.backends.open_id_connect import OpenIdConnectAuth -class OktaMixin(object): +class OktaMixin: def api_url(self): return append_slash(self.setting('API_URL')) diff --git a/dojo/product_type/views.py b/dojo/product_type/views.py index 84bb14c108e..fbc4aae063e 100644 --- a/dojo/product_type/views.py +++ b/dojo/product_type/views.py @@ -139,7 +139,7 @@ def delete_product_type(request, ptid): create_notification(event='other', title='Deletion of %s' % product_type.name, no_users=True, - description='The product type 
"%s" was deleted by %s' % (product_type.name, request.user), + description=f'The product type "{product_type.name}" was deleted by {request.user}', url=request.build_absolute_uri(reverse('product_type')), icon="exclamation-triangle") return HttpResponseRedirect(reverse('product_type')) diff --git a/dojo/reports/widgets.py b/dojo/reports/widgets.py index 36831c4ad0c..09eb1646203 100644 --- a/dojo/reports/widgets.py +++ b/dojo/reports/widgets.py @@ -57,7 +57,7 @@ def __init__(self, attrs=None): default_attrs = {'style': 'width:100%;min-height:400px'} if attrs: default_attrs.update(attrs) - super(Div, self).__init__(default_attrs) + super().__init__(default_attrs) def render(self, name, value, attrs=None, renderer=None): if value is None: @@ -109,7 +109,7 @@ class Meta: # base Widget class others will inherit from -class Widget(object): +class Widget: def __init__(self, *args, **kwargs): self.title = 'Base Widget' self.form = None @@ -130,7 +130,7 @@ def get_option_form(self): class PageBreak(Widget): def __init__(self, *args, **kwargs): - super(PageBreak, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.title = 'Page Break' self.form = None self.multiple = "true" @@ -151,7 +151,7 @@ def get_option_form(self): class ReportOptions(Widget): def __init__(self, *args, **kwargs): - super(ReportOptions, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.title = 'Report Options' self.form = CustomReportOptionsForm() self.extra_help = "Choose additional report options. These will apply to the overall report." @@ -172,7 +172,7 @@ def get_option_form(self): class CoverPage(Widget): def __init__(self, *args, **kwargs): - super(CoverPage, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.title = 'Cover Page' self.form = CoverPageForm() self.help_text = "The cover page includes a page break after its content." @@ -197,7 +197,7 @@ def get_option_form(self): class TableOfContents(Widget): def __init__(self, *args, **kwargs): - super(TableOfContents, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.title = 'Table Of Contents' self.form = TableOfContentsForm() self.help_text = "The table of contents includes a page break after its content." 
@@ -220,7 +220,7 @@ def get_option_form(self): class WYSIWYGContent(Widget): def __init__(self, *args, **kwargs): - super(WYSIWYGContent, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.title = 'WYSIWYG Content' self.form = WYSIWYGContentForm() self.multiple = 'true' @@ -267,7 +267,7 @@ def __init__(self, *args, **kwargs): else: self.finding_images = False - super(FindingList, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.title = 'Finding List' if hasattr(self.findings, 'form'): @@ -342,7 +342,7 @@ def __init__(self, *args, **kwargs): else: self.finding_images = False - super(EndpointList, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.title = 'Endpoint List' self.form = self.endpoints.form diff --git a/dojo/risk_acceptance/helper.py b/dojo/risk_acceptance/helper.py index 8034ce713c4..7fcaa796c9a 100644 --- a/dojo/risk_acceptance/helper.py +++ b/dojo/risk_acceptance/helper.py @@ -208,8 +208,7 @@ def accepted_message_creator(risk_acceptance, heads_up_days=0): def unaccepted_message_creator(risk_acceptance, heads_up_days=0): if risk_acceptance: - return 'finding was unaccepted/deleted from risk acceptance [(%s)|%s]' % \ - (escape_for_jira(risk_acceptance.name), + return 'finding was unaccepted/deleted from risk acceptance [({})|{}]'.format(escape_for_jira(risk_acceptance.name), get_full_url(reverse('view_risk_acceptance', args=(risk_acceptance.engagement.id, risk_acceptance.id)))) else: return 'Finding is no longer risk accepted' diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 2126ad82d9f..add788caaa7 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1261,10 +1261,10 @@ def saml2_attrib_map_format(dict): env_hashcode_fields_per_scanner = json.loads(env('DD_HASHCODE_FIELDS_PER_SCANNER')) for key, value in env_hashcode_fields_per_scanner.items(): if key in HASHCODE_FIELDS_PER_SCANNER: - logger.info("Replacing {} with value {} (previously set to {}) from env var DD_HASHCODE_FIELDS_PER_SCANNER".format(key, value, HASHCODE_FIELDS_PER_SCANNER[key])) + logger.info(f"Replacing {key} with value {value} (previously set to {HASHCODE_FIELDS_PER_SCANNER[key]}) from env var DD_HASHCODE_FIELDS_PER_SCANNER") HASHCODE_FIELDS_PER_SCANNER[key] = value if key not in HASHCODE_FIELDS_PER_SCANNER: - logger.info("Adding {} with value {} from env var DD_HASHCODE_FIELDS_PER_SCANNER".format(key, value)) + logger.info(f"Adding {key} with value {value} from env var DD_HASHCODE_FIELDS_PER_SCANNER") HASHCODE_FIELDS_PER_SCANNER[key] = value @@ -1477,10 +1477,10 @@ def saml2_attrib_map_format(dict): env_dedup_algorithm_per_parser = json.loads(env('DD_DEDUPLICATION_ALGORITHM_PER_PARSER')) for key, value in env_dedup_algorithm_per_parser.items(): if key in DEDUPLICATION_ALGORITHM_PER_PARSER: - logger.info("Replacing {} with value {} (previously set to {}) from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER".format(key, value, DEDUPLICATION_ALGORITHM_PER_PARSER[key])) + logger.info(f"Replacing {key} with value {value} (previously set to {DEDUPLICATION_ALGORITHM_PER_PARSER[key]}) from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER") DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value if key not in DEDUPLICATION_ALGORITHM_PER_PARSER: - logger.info("Adding {} with value {} from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER".format(key, value)) + logger.info(f"Adding {key} with value {value} from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER") DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value 
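The settings.dist.py hunks above reformat the log lines but keep the merge pattern intact: a JSON-encoded environment variable can replace or extend the built-in per-scanner mappings at startup. A condensed sketch of that pattern, reading the variable with os.environ instead of the django-environ env() helper the real settings file uses; the payload and mapping values here are illustrative only:

```python
# Sketch of the env-override merge loop shown above (illustrative values).
import json
import logging
import os

logger = logging.getLogger(__name__)

DEDUPLICATION_ALGORITHM_PER_PARSER = {"Acunetix Scan": "hash_code"}

# Hypothetical payload; in DefectDojo the variable is read via env().
os.environ.setdefault(
    "DD_DEDUPLICATION_ALGORITHM_PER_PARSER",
    '{"Acunetix Scan": "unique_id_from_tool", "My Scanner": "legacy"}',
)

for key, value in json.loads(os.environ["DD_DEDUPLICATION_ALGORITHM_PER_PARSER"]).items():
    if key in DEDUPLICATION_ALGORITHM_PER_PARSER:
        logger.info(f"Replacing {key} with value {value} "
                    f"(previously set to {DEDUPLICATION_ALGORITHM_PER_PARSER[key]})")
    else:
        logger.info(f"Adding {key} with value {value}")
    DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value

assert DEDUPLICATION_ALGORITHM_PER_PARSER["My Scanner"] == "legacy"
```

The sketch collapses the original's two separate `if key in ...` / `if key not in ...` checks into one if/else; the effect on the mapping is the same.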
DUPE_DELETE_MAX_PER_RUN = env('DD_DUPE_DELETE_MAX_PER_RUN') diff --git a/dojo/survey/views.py b/dojo/survey/views.py index 02fc9f74d59..02191342a85 100644 --- a/dojo/survey/views.py +++ b/dojo/survey/views.py @@ -484,8 +484,7 @@ def create_question(request): error = True if '_popup' in request.GET and not error: - resp = '<script type="text/javascript">opener.dismissAddAnotherPopup(window, "%s", "%s");</script>' \ - % (escape(created_question._get_pk_val()), escape(created_question.text)) + resp = f'<script type="text/javascript">opener.dismissAddAnotherPopup(window, "{escape(created_question._get_pk_val())}", "{escape(created_question.text)}");</script>' resp += '<script type="text/javascript">window.close();</script>' return HttpResponse(resp) @@ -577,8 +576,7 @@ def add_choices(request): if '_popup' in request.GET: resp = '' if created: - resp = '<script type="text/javascript">opener.dismissAddAnotherPopup(window, "%s", "%s");</script>' \ - % (escape(choice._get_pk_val()), escape(choice.label)) + resp = f'<script type="text/javascript">opener.dismissAddAnotherPopup(window, "{escape(choice._get_pk_val())}", "{escape(choice.label)}");</script>' resp += '<script type="text/javascript">window.close();</script>' return HttpResponse(resp) add_breadcrumb(title="Add Choice", top_level=False, request=request) diff --git a/dojo/tasks.py b/dojo/tasks.py index 50d48049a80..25d258f9ee8 100644 --- a/dojo/tasks.py +++ b/dojo/tasks.py @@ -43,7 +43,7 @@ def add_alerts(self, runinterval): for eng in stale_engagements: create_notification(event='stale_engagement', title='Stale Engagement: %s' % eng.name, - description='The engagement "%s" is stale. Target end was %s.' % (eng.name, eng.target_end.strftime("%b. %d, %Y")), + description='The engagement "{}" is stale. Target end was {}.'.format(eng.name, eng.target_end.strftime("%b. %d, %Y")), url=reverse('view_engagement', args=(eng.id,)), recipients=[eng.lead]) @@ -57,7 +57,7 @@ def add_alerts(self, runinterval): for eng in unclosed_engagements: create_notification(event='auto_close_engagement', title=eng.name, - description='The engagement "%s" has auto-closed. Target end was %s.' % (eng.name, eng.target_end.strftime("%b. %d, %Y")), + description='The engagement "{}" has auto-closed. Target end was {}.'.format(eng.name, eng.target_end.strftime("%b. %d, %Y")), url=reverse('view_engagement', args=(eng.id,)), recipients=[eng.lead]) @@ -139,8 +139,8 @@ def async_dupe_delete(*args, **kwargs): originals_with_too_many_duplicates = Finding.objects.filter(id__in=originals_with_too_many_duplicates_ids).order_by('id') # prefetch to make it faster - originals_with_too_many_duplicates = originals_with_too_many_duplicates.prefetch_related((Prefetch("original_finding", queryset=Finding.objects.filter(duplicate=True).order_by('date')))) + originals_with_too_many_duplicates = originals_with_too_many_duplicates.prefetch_related(Prefetch("original_finding", queryset=Finding.objects.filter(duplicate=True).order_by('date'))) total_deleted_count = 0 for original in originals_with_too_many_duplicates: @@ -148,7 +148,7 @@ dupe_count = len(duplicate_list) - dupe_max for finding in duplicate_list: - deduplicationLogger.debug('deleting finding {}:{} ({}))'.format(finding.id, finding.title, finding.hash_code)) + deduplicationLogger.debug(f'deleting finding {finding.id}:{finding.title} ({finding.hash_code}))') finding.delete() total_deleted_count += 1 dupe_count -= 1 @@ -177,7 +177,7 @@ def async_sla_compute_and_notify_task(*args, **kwargs): sla_compute_and_notify(*args, **kwargs) except Exception as e: logger.exception(e) - logger.error("An unexpected error was thrown calling the SLA code: {}".format(e)) + logger.error(f"An unexpected error was thrown calling the SLA code: {e}") @app.task diff --git a/dojo/templatetags/display_tags.py b/dojo/templatetags/display_tags.py index f49fba54bc9..221daf04132 100644 --- a/dojo/templatetags/display_tags.py +++ b/dojo/templatetags/display_tags.py @@ -135,7 +135,7 @@ def dojo_version(): version = __version__ if settings.FOOTER_VERSION: version = settings.FOOTER_VERSION - 
return "v. {}".format(version) + return f"v. {version}" @register.simple_tag diff --git a/dojo/templatetags/navigation_tags.py b/dojo/templatetags/navigation_tags.py index a6d53f00820..e446dd842de 100644 --- a/dojo/templatetags/navigation_tags.py +++ b/dojo/templatetags/navigation_tags.py @@ -69,7 +69,7 @@ def dojo_sort(request, display='Name', value='title', default=None): return safe(link) -class PaginationNav(object): +class PaginationNav: def __init__(self, page_number=None, display=None, is_current=False): self.page_number = page_number self.is_current = is_current diff --git a/dojo/test/views.py b/dojo/test/views.py index ca3d60e16fa..0ddd4464040 100644 --- a/dojo/test/views.py +++ b/dojo/test/views.py @@ -346,7 +346,7 @@ def copy_test(request, tid): extra_tags='alert-success') create_notification(event='other', title='Copying of %s' % test.title, - description='The test "%s" was copied by %s to %s' % (test.title, request.user, engagement.name), + description=f'The test "{test.title}" was copied by {request.user} to {engagement.name}', product=product, url=request.build_absolute_uri(reverse('view_test', args=(test_copy.id,))), recipients=[test.engagement.lead], @@ -412,7 +412,7 @@ def test_ics(request, tid): _("Set aside for test %(test_type_name)s, on product %(product_name)s. Additional detail can be found at %(detail_url)s") % { 'test_type_name': test.test_type.name, 'product_name': test.engagement.product.name, - 'detail_url': request.build_absolute_uri((reverse("view_test", args=(test.id,)))) + 'detail_url': request.build_absolute_uri(reverse("view_test", args=(test.id,))) }, uid) output = cal.serialize() diff --git a/dojo/tools/acunetix/parse_acunetix360_json.py b/dojo/tools/acunetix/parse_acunetix360_json.py index f9fff0b109c..0f8c01c5817 100644 --- a/dojo/tools/acunetix/parse_acunetix360_json.py +++ b/dojo/tools/acunetix/parse_acunetix360_json.py @@ -5,7 +5,7 @@ from dojo.models import Endpoint, Finding -class AcunetixJSONParser(object): +class AcunetixJSONParser: """This parser is written for Acunetix JSON Findings.""" def get_findings(self, filename, test): dupes = dict() diff --git a/dojo/tools/acunetix/parse_acunetix_xml.py b/dojo/tools/acunetix/parse_acunetix_xml.py index 12ca4100a03..529da45e998 100644 --- a/dojo/tools/acunetix/parse_acunetix_xml.py +++ b/dojo/tools/acunetix/parse_acunetix_xml.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) -class AcunetixXMLParser(object): +class AcunetixXMLParser: """This parser is written for Acunetix XML reports""" def get_findings(self, filename, test): dupes = dict() @@ -54,7 +54,7 @@ def get_findings(self, filename, test): for reference in item.findall("References/Reference"): url = reference.findtext("URL") db = reference.findtext("Database") or url - references.append(" * [{}]({})".format(db, url)) + references.append(f" * [{db}]({url})") if len(references) > 0: finding.references = "\n".join(references) if item.findtext("CVSS3/Descriptor"): @@ -128,9 +128,7 @@ def get_findings(self, filename, test): find.unsaved_req_resp.extend(finding.unsaved_req_resp) find.nb_occurences += finding.nb_occurences logger.debug( - "Duplicate finding : {defectdojo_title}".format( - defectdojo_title=finding.title - ) + f"Duplicate finding : {finding.title}" ) else: dupes[dupe_key] = finding diff --git a/dojo/tools/acunetix/parser.py b/dojo/tools/acunetix/parser.py index 9d0ee771230..272f295acf4 100644 --- a/dojo/tools/acunetix/parser.py +++ b/dojo/tools/acunetix/parser.py @@ -2,7 +2,7 @@ from dojo.tools.acunetix.parse_acunetix_xml import 
AcunetixXMLParser -class AcunetixParser(object): +class AcunetixParser: """Parser for Acunetix XML files and Acunetix 360 JSON files.""" def get_scan_types(self): diff --git a/dojo/tools/anchore_engine/parser.py b/dojo/tools/anchore_engine/parser.py index 3b9e0bc546d..f734b4a4890 100644 --- a/dojo/tools/anchore_engine/parser.py +++ b/dojo/tools/anchore_engine/parser.py @@ -3,7 +3,7 @@ from dojo.models import Finding -class AnchoreEngineParser(object): +class AnchoreEngineParser: def get_scan_types(self): return ["Anchore Engine Scan"] diff --git a/dojo/tools/anchore_enterprise/parser.py b/dojo/tools/anchore_enterprise/parser.py index 899e600a51e..82dad174d74 100644 --- a/dojo/tools/anchore_enterprise/parser.py +++ b/dojo/tools/anchore_enterprise/parser.py @@ -69,9 +69,7 @@ def get_findings(self, filename, test): test=test, description=description, severity=severity, - references="Policy ID: {}\nTrigger ID: {}".format( - policyid, triggerid - ), + references=f"Policy ID: {policyid}\nTrigger ID: {triggerid}", file_path=search_filepath(description), component_name=repo, component_version=tag, @@ -86,9 +84,7 @@ def get_findings(self, filename, test): items.append(find) except (KeyError, IndexError) as err: raise ValueError( - "Invalid format: {} key not found".format( - err - ) + f"Invalid format: {err} key not found" ) except AttributeError as err: # import empty policies without error (e.g. policies or images diff --git a/dojo/tools/anchore_grype/parser.py b/dojo/tools/anchore_grype/parser.py index 9854bf34d5b..ad04aaf3b75 100644 --- a/dojo/tools/anchore_grype/parser.py +++ b/dojo/tools/anchore_grype/parser.py @@ -5,7 +5,7 @@ from dojo.models import Finding -class AnchoreGrypeParser(object): +class AnchoreGrypeParser: """Anchore Grype JSON report format generated with `-o json` option. command: `grype defectdojo/defectdojo-django:1.13.1 -o json > many_vulns.json` diff --git a/dojo/tools/anchorectl_policies/parser.py b/dojo/tools/anchorectl_policies/parser.py index 1df2fa94f95..3de3f5d0c19 100644 --- a/dojo/tools/anchorectl_policies/parser.py +++ b/dojo/tools/anchorectl_policies/parser.py @@ -54,9 +54,7 @@ def get_findings(self, filename, test): test=test, description=description, severity=severity, - references="Policy ID: {}\nTrigger ID: {}".format( - policy_id, trigger_id - ), + references=f"Policy ID: {policy_id}\nTrigger ID: {trigger_id}", file_path=search_filepath(description), component_name=repo, component_version=tag, @@ -69,7 +67,7 @@ def get_findings(self, filename, test): items.append(find) except (KeyError, IndexError) as err: raise ValueError( - "Invalid format: {} key not found".format(err) + f"Invalid format: {err} key not found" ) except AttributeError as err: # import empty policies without error (e.g. 
policies or images diff --git a/dojo/tools/anchorectl_vulns/parser.py b/dojo/tools/anchorectl_vulns/parser.py index 77c350b56bd..652ac821afb 100644 --- a/dojo/tools/anchorectl_vulns/parser.py +++ b/dojo/tools/anchorectl_vulns/parser.py @@ -3,7 +3,7 @@ from dojo.models import Finding -class AnchoreCTLVulnsParser(object): +class AnchoreCTLVulnsParser: def get_scan_types(self): return ["AnchoreCTL Vuln Report"] diff --git a/dojo/tools/api_blackduck/api_client.py b/dojo/tools/api_blackduck/api_client.py index fb8058bc8ba..a2895dc5bee 100644 --- a/dojo/tools/api_blackduck/api_client.py +++ b/dojo/tools/api_blackduck/api_client.py @@ -17,9 +17,7 @@ def __init__(self, tool_config): ) else: raise ValueError( - "Authentication type {} not supported".format( - tool_config.authentication_type - ) + f"Authentication type {tool_config.authentication_type} not supported" ) # TODO diff --git a/dojo/tools/api_blackduck/importer.py b/dojo/tools/api_blackduck/importer.py index cf7a143bb9c..af0917a0e5e 100644 --- a/dojo/tools/api_blackduck/importer.py +++ b/dojo/tools/api_blackduck/importer.py @@ -4,7 +4,7 @@ from .api_client import BlackduckAPI -class BlackduckApiImporter(object): +class BlackduckApiImporter: """ Import from BlackDuck API """ diff --git a/dojo/tools/api_blackduck/parser.py b/dojo/tools/api_blackduck/parser.py index be76f28c6ef..0be66807877 100644 --- a/dojo/tools/api_blackduck/parser.py +++ b/dojo/tools/api_blackduck/parser.py @@ -7,7 +7,7 @@ SCAN_TYPE_ID = "BlackDuck API" -class ApiBlackduckParser(object): +class ApiBlackduckParser: """ Import from Synopsys BlackDuck API /findings """ diff --git a/dojo/tools/api_bugcrowd/api_client.py b/dojo/tools/api_bugcrowd/api_client.py index ddb73f0b0ae..f1207bd77a4 100644 --- a/dojo/tools/api_bugcrowd/api_client.py +++ b/dojo/tools/api_bugcrowd/api_client.py @@ -19,14 +19,12 @@ def __init__(self, tool_config): if tool_config.authentication_type == "API": self.api_token = tool_config.api_key self.session.headers.update( - {"Authorization": "Token {}".format(self.api_token)} + {"Authorization": f"Token {self.api_token}"} ) self.session.headers.update(self.default_headers) else: raise Exception( - "bugcrowd Authentication type {} not supported".format( - tool_config.authentication_type - ) + f"bugcrowd Authentication type {tool_config.authentication_type} not supported" ) def get_findings(self, program, target): @@ -53,9 +51,7 @@ def get_findings(self, program, target): else: params_encoded = urlencode(params_default) - next = "{}/submissions?{}".format( - self.bugcrowd_api_url, params_encoded - ) + next = f"{self.bugcrowd_api_url}/submissions?{params_encoded}" while next != "": response = self.session.get(url=next) response.raise_for_status() @@ -79,13 +75,13 @@ def get_findings(self, program, target): def test_connection(self): # Request programs response_programs = self.session.get( - url="{}/programs".format(self.bugcrowd_api_url) + url=f"{self.bugcrowd_api_url}/programs" ) response_programs.raise_for_status() # Request submissions to validate the org token response_subs = self.session.get( - url="{}/submissions".format(self.bugcrowd_api_url) + url=f"{self.bugcrowd_api_url}/submissions" ) response_subs.raise_for_status() if response_programs.ok and response_subs.ok: @@ -99,7 +95,7 @@ def test_connection(self): ) # Request targets to validate the org token response_targets = self.session.get( - url="{}/targets".format(self.bugcrowd_api_url) + url=f"{self.bugcrowd_api_url}/targets" ) response_targets.raise_for_status() if response_targets.ok: @@ 
-119,16 +115,12 @@ def test_connection(self): else: raise Exception( "Bugcrowd API test not successful, no targets were defined in Bugcrowd which is used for " - "filtering, check your configuration, HTTP response was: {}".format( - response_targets.text - ) + f"filtering, check your configuration, HTTP response was: {response_targets.text}" ) else: raise Exception( "Bugcrowd API test not successful, could not retrieve the programs or submissions, check your " - "configuration, HTTP response for programs was: {}, HTTP response for submissions was: {}".format( - response_programs.text, response_subs.text - ) + f"configuration, HTTP response for programs was: {response_programs.text}, HTTP response for submissions was: {response_subs.text}" ) def test_product_connection(self, api_scan_configuration): diff --git a/dojo/tools/api_bugcrowd/importer.py b/dojo/tools/api_bugcrowd/importer.py index 3e41b6be20a..56b7ca1a228 100644 --- a/dojo/tools/api_bugcrowd/importer.py +++ b/dojo/tools/api_bugcrowd/importer.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) -class BugcrowdApiImporter(object): +class BugcrowdApiImporter: """ Import from Bugcrowd API """ @@ -14,9 +14,7 @@ class BugcrowdApiImporter(object): def get_findings(self, test): client, config = self.prepare_client(test) logger.debug( - "Fetching submissions program {} and target {}".format( - str(config.service_key_1), str(config.service_key_2) - ) + f"Fetching submissions program {str(config.service_key_1)} and target {str(config.service_key_2)}" ) submissions_paged = client.get_findings( @@ -29,7 +27,7 @@ def get_findings(self, test): for page in submissions_paged: submissions += page counter += 1 - logger.debug("{} Bugcrowd submissions pages fetched".format(counter)) + logger.debug(f"{counter} Bugcrowd submissions pages fetched") return submissions, config diff --git a/dojo/tools/api_bugcrowd/parser.py b/dojo/tools/api_bugcrowd/parser.py index 17cc04f84d5..df119ed576e 100644 --- a/dojo/tools/api_bugcrowd/parser.py +++ b/dojo/tools/api_bugcrowd/parser.py @@ -16,7 +16,7 @@ logger = logging.getLogger(__name__) -class ApiBugcrowdParser(object): +class ApiBugcrowdParser: """ Import from Bugcrowd API /submissions """ @@ -158,15 +158,11 @@ def get_findings(self, file, test): finding.unsaved_endpoints = [bug_endpoint] except Exception as e: logger.error( - "{} bug url from bugcrowd failed to parse to endpoint, error= {}".format( - str(bug_endpoint), e - ) + f"{str(bug_endpoint)} bug url from bugcrowd failed to parse to endpoint, error= {e}" ) except ValidationError: logger.error( - "Broken Bugcrowd endpoint {} was skipped.".format( - bug_endpoint.host - ) + f"Broken Bugcrowd endpoint {bug_endpoint.host} was skipped." 
) findings.append(finding) diff --git a/dojo/tools/api_cobalt/api_client.py b/dojo/tools/api_cobalt/api_client.py index f51cef1dbee..e18ed6f01f6 100644 --- a/dojo/tools/api_cobalt/api_client.py +++ b/dojo/tools/api_cobalt/api_client.py @@ -15,9 +15,7 @@ def __init__(self, tool_config): self.org_token = tool_config.extras else: raise Exception( - "Cobalt.io Authentication type {} not supported".format( - tool_config.authentication_type - ) + f"Cobalt.io Authentication type {tool_config.authentication_type} not supported" ) def get_asset(self, asset_id): @@ -32,12 +30,12 @@ def get_asset(self, asset_id): if asset["resource"]["id"] == asset_id: return asset - raise Exception("Asset {} not found in organisation".format(asset_id)) + raise Exception(f"Asset {asset_id} not found in organisation") def get_assets(self): """Returns all org assets""" response = self.session.get( - url="{}/assets?limit=1000".format(self.cobalt_api_url), + url=f"{self.cobalt_api_url}/assets?limit=1000", headers=self.get_headers(), ) @@ -57,9 +55,7 @@ def get_findings(self, asset_id): :return: """ response = self.session.get( - url="{}/findings?limit=1000&asset={}".format( - self.cobalt_api_url, asset_id - ), + url=f"{self.cobalt_api_url}/findings?limit=1000&asset={asset_id}", headers=self.get_headers(), ) @@ -75,13 +71,13 @@ def get_findings(self, asset_id): def test_connection(self): # Request orgs for the org name response_orgs = self.session.get( - url="{}/orgs".format(self.cobalt_api_url), + url=f"{self.cobalt_api_url}/orgs", headers=self.get_headers(), ) # Request assets to validate the org token response_assets = self.session.get( - url="{}/assets".format(self.cobalt_api_url), + url=f"{self.cobalt_api_url}/assets", headers=self.get_headers(), ) @@ -111,7 +107,7 @@ def test_product_connection(self, api_scan_configuration): def get_headers(self): headers = { "accept": "application/vnd.cobalt.v1+json", - "Authorization": "Bearer {}".format(self.api_token), + "Authorization": f"Bearer {self.api_token}", "User-Agent": "DefectDojo", } diff --git a/dojo/tools/api_cobalt/importer.py b/dojo/tools/api_cobalt/importer.py index 93ba6a06e0c..9f3a291f4af 100644 --- a/dojo/tools/api_cobalt/importer.py +++ b/dojo/tools/api_cobalt/importer.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) -class CobaltApiImporter(object): +class CobaltApiImporter: """ Import from Cobalt.io API """ diff --git a/dojo/tools/api_cobalt/parser.py b/dojo/tools/api_cobalt/parser.py index 0e77b0d279b..6be1a4e855a 100644 --- a/dojo/tools/api_cobalt/parser.py +++ b/dojo/tools/api_cobalt/parser.py @@ -8,7 +8,7 @@ SCAN_COBALTIO_API = "Cobalt.io API Import" -class ApiCobaltParser(object): +class ApiCobaltParser: """ Import from Cobalt.io API /findings """ diff --git a/dojo/tools/api_edgescan/api_client.py b/dojo/tools/api_edgescan/api_client.py index 8e2f11ad095..a49c7686ebf 100644 --- a/dojo/tools/api_edgescan/api_client.py +++ b/dojo/tools/api_edgescan/api_client.py @@ -3,7 +3,7 @@ from json.decoder import JSONDecodeError -class EdgescanAPI(object): +class EdgescanAPI: """ A simple client for the Edgescan API """ @@ -17,9 +17,7 @@ def __init__(self, tool_config): self.options = self.get_extra_options(tool_config) else: raise Exception( - "Edgescan Authentication type {} not supported".format( - tool_config.authentication_type - ) + f"Edgescan Authentication type {tool_config.authentication_type} not supported" ) @staticmethod diff --git a/dojo/tools/api_edgescan/importer.py b/dojo/tools/api_edgescan/importer.py index dc97edf82a8..5857b188ab3 100644 
--- a/dojo/tools/api_edgescan/importer.py +++ b/dojo/tools/api_edgescan/importer.py @@ -3,7 +3,7 @@ from .api_client import EdgescanAPI -class EdgescanImporter(object): +class EdgescanImporter: """ Import from Edgescan API """ diff --git a/dojo/tools/api_edgescan/parser.py b/dojo/tools/api_edgescan/parser.py index 3e186e6d6b0..8442c7cc666 100644 --- a/dojo/tools/api_edgescan/parser.py +++ b/dojo/tools/api_edgescan/parser.py @@ -8,7 +8,7 @@ SCANTYPE_EDGESCAN = "Edgescan Scan" -class ApiEdgescanParser(object): +class ApiEdgescanParser: """ Import from Edgescan API or JSON file """ diff --git a/dojo/tools/api_sonarqube/importer.py b/dojo/tools/api_sonarqube/importer.py index 31a5c62e77c..6d4d1577f59 100644 --- a/dojo/tools/api_sonarqube/importer.py +++ b/dojo/tools/api_sonarqube/importer.py @@ -14,7 +14,7 @@ logger = logging.getLogger(__name__) -class SonarQubeApiImporter(object): +class SonarQubeApiImporter: """ This class imports from SonarQube (SQ) all open/confirmed SQ issues related to the project related to the test as findings. diff --git a/dojo/tools/api_sonarqube/parser.py b/dojo/tools/api_sonarqube/parser.py index f4e7162d311..8a57a8d80b3 100644 --- a/dojo/tools/api_sonarqube/parser.py +++ b/dojo/tools/api_sonarqube/parser.py @@ -4,7 +4,7 @@ SCAN_SONARQUBE_API = "SonarQube API Import" -class ApiSonarQubeParser(object): +class ApiSonarQubeParser: def get_scan_types(self): return [SCAN_SONARQUBE_API] diff --git a/dojo/tools/api_sonarqube/updater.py b/dojo/tools/api_sonarqube/updater.py index b4815687730..4cbf28dc3e2 100644 --- a/dojo/tools/api_sonarqube/updater.py +++ b/dojo/tools/api_sonarqube/updater.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class SonarQubeApiUpdater(object): +class SonarQubeApiUpdater: """ This class updates in SonarQube, a SonarQube issue previously imported as a DefectDojo Findings. This class maps the finding status to a SQ issue status and later on it transitions the issue @@ -119,9 +119,7 @@ def update_sonarqube_finding(self, finding): return logger.debug( - "Checking if finding '{}' needs to be updated in SonarQube".format( - finding - ) + f"Checking if finding '{finding}' needs to be updated in SonarQube" ) client, _ = SonarQubeApiImporter.prepare_client(finding.test) @@ -142,9 +140,7 @@ def update_sonarqube_finding(self, finding): current_status = issue.get("status") logger.debug( - "--> SQ Current status: {}. Current target status: {}".format( - current_status, target_status - ) + f"--> SQ Current status: {current_status}. Current target status: {target_status}" ) transitions = self.get_sonarqube_required_transitions_for( @@ -152,7 +148,7 @@ def update_sonarqube_finding(self, finding): ) if transitions: logger.info( - "Updating finding '{}' in SonarQube".format(finding) + f"Updating finding '{finding}' in SonarQube" ) for transition in transitions: diff --git a/dojo/tools/api_sonarqube/updater_from_source.py b/dojo/tools/api_sonarqube/updater_from_source.py index 137c55dbd79..ee8fdb33d6a 100644 --- a/dojo/tools/api_sonarqube/updater_from_source.py +++ b/dojo/tools/api_sonarqube/updater_from_source.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) -class SonarQubeApiUpdaterFromSource(object): +class SonarQubeApiUpdaterFromSource: """ The responsibility of this class is to update the Finding status if current SonarQube issue status doesn't match. 
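Note for reviewers following the conversion pattern in the SonarQube updater hunks above: an f-string renders to exactly the same text as the equivalent str.format() call, so these logging rewrites are behavior-preserving. A minimal, self-contained sketch (the status values are illustrative, not taken from a real SonarQube issue):

current_status = "RESOLVED / FIXED"
target_status = "REOPENED"

# Old form, as removed by this diff:
old = "--> SQ Current status: {}. Current target status: {}".format(
    current_status, target_status
)
# New form, as added by this diff:
new = f"--> SQ Current status: {current_status}. Current target status: {target_status}"

assert old == new  # identical output; only the syntax differs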
@@ -40,9 +40,7 @@ def update(self, finding): current_status = issue.get("resolution") or issue.get("status") current_finding_status = self.get_sonarqube_status_for(finding) logger.debug( - "--> SQ Current status: {}. Finding status: {}".format( - current_status, current_finding_status - ) + f"--> SQ Current status: {current_status}. Finding status: {current_finding_status}" ) if ( @@ -50,9 +48,7 @@ def update(self, finding): and current_finding_status != current_status ): logger.info( - "Original SonarQube issue '{}' has changed. Updating DefectDojo finding '{}'...".format( - sonarqube_issue, finding - ) + f"Original SonarQube issue '{sonarqube_issue}' has changed. Updating DefectDojo finding '{finding}'..." ) self.update_finding_status(finding, current_status) diff --git a/dojo/tools/api_vulners/api_client.py b/dojo/tools/api_vulners/api_client.py index c12996abbc5..9441fada342 100644 --- a/dojo/tools/api_vulners/api_client.py +++ b/dojo/tools/api_vulners/api_client.py @@ -16,9 +16,7 @@ def __init__(self, tool_config): self.vulners_api_url = tool_config.url else: raise Exception( - "Vulners.com Authentication type {} not supported".format( - tool_config.authentication_type - ) + f"Vulners.com Authentication type {tool_config.authentication_type} not supported" ) def get_client(self): diff --git a/dojo/tools/api_vulners/importer.py b/dojo/tools/api_vulners/importer.py index fef0d40c669..0017122f97d 100644 --- a/dojo/tools/api_vulners/importer.py +++ b/dojo/tools/api_vulners/importer.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) -class VulnersImporter(object): +class VulnersImporter: """ Import from Vulners API """ diff --git a/dojo/tools/api_vulners/parser.py b/dojo/tools/api_vulners/parser.py index deba3c5762f..50674e5130a 100644 --- a/dojo/tools/api_vulners/parser.py +++ b/dojo/tools/api_vulners/parser.py @@ -17,7 +17,7 @@ } -class ApiVulnersParser(object): +class ApiVulnersParser: """Parser that can load data from Vulners Scanner API""" def get_scan_types(self): diff --git a/dojo/tools/appspider/parser.py b/dojo/tools/appspider/parser.py index 4d3e5eccc7b..6b76df3718a 100644 --- a/dojo/tools/appspider/parser.py +++ b/dojo/tools/appspider/parser.py @@ -4,7 +4,7 @@ from dojo.models import Endpoint, Finding -class AppSpiderParser(object): +class AppSpiderParser: """Parser for Rapid7 AppSpider reports""" def get_scan_types(self): diff --git a/dojo/tools/aqua/parser.py b/dojo/tools/aqua/parser.py index d29d6128a6a..d6ea61edc9a 100644 --- a/dojo/tools/aqua/parser.py +++ b/dojo/tools/aqua/parser.py @@ -3,7 +3,7 @@ from dojo.models import Finding -class AquaParser(object): +class AquaParser: def get_scan_types(self): return ["Aqua Scan"] @@ -61,7 +61,7 @@ def get_item(resource, vuln, test): score = vuln.get("aqua_severity") severity = aqua_severity_of(score) used_for_classification = ( - "Aqua security score ({}) used for classification.\n".format(score) + f"Aqua security score ({score}) used for classification.\n" ) severity_justification = vuln.get("aqua_severity_classification") if "nvd_score_v3" in vuln: @@ -70,17 +70,17 @@ def get_item(resource, vuln, test): if "aqua_score" in vuln: score = vuln.get("aqua_score") used_for_classification = ( - "Aqua score ({}) used for classification.\n".format(score) + f"Aqua score ({score}) used for classification.\n" ) elif "vendor_score" in vuln: score = vuln.get("vendor_score") used_for_classification = ( - "Vendor score ({}) used for classification.\n".format(score) + f"Vendor score ({score}) used for classification.\n" ) elif "nvd_score_v3" 
in vuln: score = vuln.get("nvd_score_v3") used_for_classification = ( - "NVD score v3 ({}) used for classification.\n".format(score) + f"NVD score v3 ({score}) used for classification.\n" ) severity_justification += "\nNVD v3 vectors: {}".format( vuln.get("nvd_vectors_v3") @@ -90,13 +90,13 @@ def get_item(resource, vuln, test): elif "nvd_score" in vuln: score = vuln.get("nvd_score") used_for_classification = ( - "NVD score v2 ({}) used for classification.\n".format(score) + f"NVD score v2 ({score}) used for classification.\n" ) severity_justification += "\nNVD v2 vectors: {}".format( vuln.get("nvd_vectors") ) severity = severity_of(score) - severity_justification += "\n{}".format(used_for_classification) + severity_justification += f"\n{used_for_classification}" finding = Finding( title=vulnerability_id diff --git a/dojo/tools/arachni/parser.py b/dojo/tools/arachni/parser.py index 22e67fe1b89..334b671b01e 100755 --- a/dojo/tools/arachni/parser.py +++ b/dojo/tools/arachni/parser.py @@ -7,7 +7,7 @@ from dojo.models import Endpoint, Finding -class ArachniParser(object): +class ArachniParser: """Arachni Web Scanner (http://arachni-scanner.com/wiki) Reports are generated with arachni_reporter tool: diff --git a/dojo/tools/asff/parser.py b/dojo/tools/asff/parser.py index c5831d025b3..1f4d96f5679 100644 --- a/dojo/tools/asff/parser.py +++ b/dojo/tools/asff/parser.py @@ -13,7 +13,7 @@ } -class AsffParser(object): +class AsffParser: def get_scan_types(self): return ["AWS Security Finding Format (ASFF) Scan"] diff --git a/dojo/tools/auditjs/parser.py b/dojo/tools/auditjs/parser.py index 69031dc16bb..6249f1b045c 100644 --- a/dojo/tools/auditjs/parser.py +++ b/dojo/tools/auditjs/parser.py @@ -6,7 +6,7 @@ import cvss.parser -class AuditJSParser(object): +class AuditJSParser: """Parser for AuditJS Scan tool""" def get_scan_types(self): diff --git a/dojo/tools/aws_prowler/parser.py b/dojo/tools/aws_prowler/parser.py index b7320039308..9659262fb8b 100644 --- a/dojo/tools/aws_prowler/parser.py +++ b/dojo/tools/aws_prowler/parser.py @@ -10,7 +10,7 @@ from dojo.models import Finding -class AWSProwlerParser(object): +class AWSProwlerParser: def get_scan_types(self): return ["AWS Prowler Scan"] diff --git a/dojo/tools/aws_prowler_v3/parser.py b/dojo/tools/aws_prowler_v3/parser.py index c36c87ad9be..e0e6910fd7c 100644 --- a/dojo/tools/aws_prowler_v3/parser.py +++ b/dojo/tools/aws_prowler_v3/parser.py @@ -7,7 +7,7 @@ from dojo.models import Finding -class AWSProwlerV3Parser(object): +class AWSProwlerV3Parser: SCAN_TYPE = ["AWS Prowler V3"] def get_scan_types(self): diff --git a/dojo/tools/aws_scout2/parser.py b/dojo/tools/aws_scout2/parser.py index 55b6d31afaa..8fd45eba17c 100644 --- a/dojo/tools/aws_scout2/parser.py +++ b/dojo/tools/aws_scout2/parser.py @@ -6,7 +6,7 @@ from html2text import html2text -class AWSScout2Parser(object): +class AWSScout2Parser: # FIXME bad very bad item_data = "" pdepth = 0 @@ -31,20 +31,17 @@ def get_findings(self, filename, test): test_description = "" aws_account_id = data["aws_account_id"] - test_description = "%s **AWS Account:** %s\n" % ( - test_description, - aws_account_id, - ) + test_description = f"{test_description} **AWS Account:** {aws_account_id}\n" last_run = data["last_run"] - test_description = "%s **Ruleset:** %s\n" % ( + test_description = "{} **Ruleset:** {}\n".format( test_description, last_run["ruleset_name"], ) - test_description = "%s **Ruleset Description:** %s\n" % ( + test_description = "{} **Ruleset Description:** {}\n".format( test_description, 
last_run["ruleset_about"], ) - test_description = "%s **Command:** %s\n" % ( + test_description = "{} **Command:** {}\n".format( test_description, last_run["cmd"], ) @@ -52,27 +49,24 @@ def get_findings(self, filename, test): # Summary for AWS Services test_description = "%s\n**AWS Services** \n\n" % (test_description) for service, items in list(last_run["summary"].items()): - test_description = "%s\n**%s** \n" % ( - test_description, - service.upper(), - ) - test_description = "%s\n* **Checked Items:** %s\n" % ( + test_description = f"{test_description}\n**{service.upper()}** \n" + test_description = "{}\n* **Checked Items:** {}\n".format( test_description, items["checked_items"], ) - test_description = "%s* **Flagged Items:** %s\n" % ( + test_description = "{}* **Flagged Items:** {}\n".format( test_description, items["flagged_items"], ) - test_description = "%s* **Max Level:** %s\n" % ( + test_description = "{}* **Max Level:** {}\n".format( test_description, items["max_level"], ) - test_description = "%s* **Resource Count:** %s\n" % ( + test_description = "{}* **Resource Count:** {}\n".format( test_description, items["resources_count"], ) - test_description = "%s* **Rules Count:** %s\n\n" % ( + test_description = "{}* **Rules Count:** {}\n\n".format( test_description, items["rules_count"], ) @@ -178,7 +172,7 @@ def tabs(n): self.item_data = ( self.item_data + self.formatview(depth) - + "**%s:** %s\n\n" % (key.title(), src) + + f"**{key.title()}:** {src}\n\n" ) else: self.item_data = ( diff --git a/dojo/tools/awssecurityhub/compliance.py b/dojo/tools/awssecurityhub/compliance.py index 3898442d69e..914a196b802 100644 --- a/dojo/tools/awssecurityhub/compliance.py +++ b/dojo/tools/awssecurityhub/compliance.py @@ -2,7 +2,7 @@ from dojo.models import Finding -class Compliance(object): +class Compliance: def get_item(self, finding: dict, test): finding_id = finding.get("Id", "") title = finding.get("Title", "") diff --git a/dojo/tools/awssecurityhub/guardduty.py b/dojo/tools/awssecurityhub/guardduty.py index 3b22498ddc3..7a663dcf1d8 100644 --- a/dojo/tools/awssecurityhub/guardduty.py +++ b/dojo/tools/awssecurityhub/guardduty.py @@ -2,7 +2,7 @@ from dojo.models import Finding, Endpoint -class GuardDuty(object): +class GuardDuty: def get_item(self, finding: dict, test): finding_id = finding.get("Id", "") title = finding.get("Title", "") diff --git a/dojo/tools/awssecurityhub/inspector.py b/dojo/tools/awssecurityhub/inspector.py index 2c4c79db4ed..ce0b7701adb 100644 --- a/dojo/tools/awssecurityhub/inspector.py +++ b/dojo/tools/awssecurityhub/inspector.py @@ -2,7 +2,7 @@ from dojo.models import Finding, Endpoint -class Inspector(object): +class Inspector: def get_item(self, finding: dict, test): finding_id = finding.get("Id", "") title = finding.get("Title", "") diff --git a/dojo/tools/awssecurityhub/parser.py b/dojo/tools/awssecurityhub/parser.py index 7380ece6954..d7110c6daf7 100644 --- a/dojo/tools/awssecurityhub/parser.py +++ b/dojo/tools/awssecurityhub/parser.py @@ -4,7 +4,7 @@ from dojo.tools.awssecurityhub.compliance import Compliance -class AwsSecurityHubParser(object): +class AwsSecurityHubParser: def get_scan_types(self): return ["AWS Security Hub Scan"] diff --git a/dojo/tools/azure_security_center_recommendations/parser.py b/dojo/tools/azure_security_center_recommendations/parser.py index 9d90519fb21..e2e9faf5ebe 100644 --- a/dojo/tools/azure_security_center_recommendations/parser.py +++ b/dojo/tools/azure_security_center_recommendations/parser.py @@ -5,7 +5,7 @@ from dojo.models import 
Finding -class AzureSecurityCenterRecommendationsParser(object): +class AzureSecurityCenterRecommendationsParser: def get_scan_types(self): return ["Azure Security Center Recommendations Scan"] diff --git a/dojo/tools/bandit/parser.py b/dojo/tools/bandit/parser.py index 18b03967ad2..e1f83277563 100644 --- a/dojo/tools/bandit/parser.py +++ b/dojo/tools/bandit/parser.py @@ -4,7 +4,7 @@ from dojo.models import Finding -class BanditParser(object): +class BanditParser: def get_scan_types(self): return ["Bandit Scan"] diff --git a/dojo/tools/bearer_cli/parser.py b/dojo/tools/bearer_cli/parser.py index 9c0126c3a6c..856b752c28f 100644 --- a/dojo/tools/bearer_cli/parser.py +++ b/dojo/tools/bearer_cli/parser.py @@ -2,7 +2,7 @@ from dojo.models import Finding -class BearerParser(object): +class BearerParser: """ Bearer CLI tool is a SAST scanner for multiple languages """ diff --git a/dojo/tools/blackduck/importer.py b/dojo/tools/blackduck/importer.py index 6cf5eb95d94..8ab2c0d89d6 100644 --- a/dojo/tools/blackduck/importer.py +++ b/dojo/tools/blackduck/importer.py @@ -33,7 +33,7 @@ def _process_csvfile(self, report): No file information then. """ security_issues = dict() - with open(str(report), "r") as f: + with open(str(report)) as f: security_issues = self.__partition_by_key(f) project_ids = set(security_issues.keys()) @@ -80,7 +80,7 @@ def _process_project_findings( path = file_entry_dict.get("Path") archive_context = file_entry_dict.get("Archive context") if archive_context: - full_path = "{}{}".format(archive_context, path[1:]) + full_path = f"{archive_context}{path[1:]}" else: full_path = path diff --git a/dojo/tools/blackduck/parser.py b/dojo/tools/blackduck/parser.py index 804bb1bf5a7..94c35e9881a 100644 --- a/dojo/tools/blackduck/parser.py +++ b/dojo/tools/blackduck/parser.py @@ -4,7 +4,7 @@ from .importer import BlackduckImporter -class BlackduckParser(object): +class BlackduckParser: """ Can import as exported from Blackduck: - from a zip file containing a security.csv and files.csv @@ -45,16 +45,14 @@ def ingest_findings(self, normalized_findings, test): references = self.format_reference(i) dupe_key = hashlib.md5( - "{} | {}".format(title, i.vuln_source).encode("utf-8") + f"{title} | {i.vuln_source}".encode() ).hexdigest() if dupe_key in dupes: finding = dupes[dupe_key] if finding.description: finding.description += ( - "Vulnerability ID: {}\n {}\n".format( - vulnerability_id, i.vuln_source - ) + f"Vulnerability ID: {vulnerability_id}\n {i.vuln_source}\n" ) dupes[dupe_key] = finding else: @@ -87,31 +85,27 @@ def format_title(self, i): else: component_title = i.component_origin_id - return "{} - {}".format(i.vuln_id, component_title) + return f"{i.vuln_id} - {component_title}" def format_description(self, i): - description = "Published on: {}\n\n".format(str(i.published_date)) - description += "Updated on: {}\n\n".format(str(i.updated_date)) - description += "Base score: {}\n\n".format(str(i.base_score)) - description += "Exploitability: {}\n\n".format(str(i.exploitability)) - description += "Description: {}\n".format(i.description) + description = f"Published on: {i.published_date}\n\n" + description += f"Updated on: {i.updated_date}\n\n" + description += f"Base score: {i.base_score}\n\n" + description += f"Exploitability: {i.exploitability}\n\n" + description += f"Description: {i.description}\n" return description def format_mitigation(self, i): - mitigation = "Remediation status: {}\n".format(i.remediation_status) - mitigation += "Remediation target date: 
{}\n".format( - i.remediation_target_date - ) - mitigation += "Remediation actual date: {}\n".format( - i.remediation_actual_date - ) - mitigation += "Remediation comment: {}\n".format(i.remediation_comment) + mitigation = f"Remediation status: {i.remediation_status}\n" + mitigation += f"Remediation target date: {i.remediation_target_date}\n" + mitigation += f"Remediation actual date: {i.remediation_actual_date}\n" + mitigation += f"Remediation comment: {i.remediation_comment}\n" return mitigation def format_reference(self, i): - reference = "Source: {}\n".format(i.vuln_source) - reference += "URL: {}\n".format(i.url) + reference = f"Source: {i.vuln_source}\n" + reference += f"URL: {i.url}\n" return reference diff --git a/dojo/tools/blackduck_binary_analysis/importer.py b/dojo/tools/blackduck_binary_analysis/importer.py index fcbe4d49a88..3060838d7e8 100644 --- a/dojo/tools/blackduck_binary_analysis/importer.py +++ b/dojo/tools/blackduck_binary_analysis/importer.py @@ -27,7 +27,7 @@ def _process_csvfile(self, report, orig_report_name): If passed a CSV file, process. """ vulnerabilities = dict() - with open(str(report), "r") as f: + with open(str(report)) as f: vulnerabilities = self.__partition_by_key(f) sha1_hash_keys = set(vulnerabilities.keys()) diff --git a/dojo/tools/blackduck_binary_analysis/parser.py b/dojo/tools/blackduck_binary_analysis/parser.py index 55049312569..baab6cd9352 100644 --- a/dojo/tools/blackduck_binary_analysis/parser.py +++ b/dojo/tools/blackduck_binary_analysis/parser.py @@ -5,7 +5,7 @@ from cvss import CVSS2, CVSS3 -class BlackduckBinaryAnalysisParser(object): +class BlackduckBinaryAnalysisParser: """ Report type(s) from Blackduck Binary Analysis compatible with DefectDojo: - Single CSV file containing vulnerable components @@ -66,7 +66,7 @@ def ingest_findings(self, sorted_findings, test): references = self.format_references(i) unique_finding_key = hashlib.sha256( - "{}".format(file_path + object_sha1 + title).encode("utf-8") + f"{file_path + object_sha1 + title}".encode() ).hexdigest() if unique_finding_key in findings: @@ -105,11 +105,7 @@ def ingest_findings(self, sorted_findings, test): return findings.values() def format_title(self, i): - title = "{}: {} {} Vulnerable".format( - i.object_name, - i.component, - i.version, - ) + title = f"{i.object_name}: {i.component} {i.version} Vulnerable" if i.cve is not None: title += f" to {i.cve}" @@ -117,47 +113,30 @@ def format_title(self, i): return title def format_description(self, i): - description = "CSV Result: {}\n".format(str(i.report_name)) - description += "Vulnerable Component: {}\n".format(str(i.component)) - description += "Vulnerable Component Version in Use: {}\n".format(str(i.version)) - description += "Vulnerable Component Latest Version: {}\n".format( - str(i.latest_version) - ) - description += "Matching Type: {}\n".format(str(i.matching_type)) - description += "Object Name: {}\n".format( - str(i.object_name) - ) - description += "Object Extraction Path: {}\n".format( - str(i.object_full_path) - ) - description += "Object Compilation Date: {}\n".format( - str(i.object_compilation_date) - ) - description += "Object SHA1: {}\n".format(str(i.object_sha1)) - description += "CVE: {}\n".format(str(i.cve)) - description += "CVE Publication Date: {}\n".format( - str(i.cve_publication_date) - ) - description += "Distribution Package: {}\n".format( - str(i.distribution_package) - ) - description += "Missing Exploit Mitigations: {}\n".format( - str(i.missing_exploit_mitigations) - ) - description += "BDSA: 
{}\n".format(str(i.bdsa)) - description += "Summary:\n{}\n".format(str(i.summary)) - description += "Note Type:\n{}\n".format(str(i.note_type)) - description += "Note Reason:\n{}\n".format(str(i.note_reason)) - description += "Triage Vectors:\n{}\n".format(str(i.triage_vectors)) - description += "Unresolving Triage Vectors:\n{}\n".format(str(i.triage_vectors)) + description = f"CSV Result: {str(i.report_name)}\n" + description += f"Vulnerable Component: {str(i.component)}\n" + description += f"Vulnerable Component Version in Use: {str(i.version)}\n" + description += f"Vulnerable Component Latest Version: {str(i.latest_version)}\n" + description += f"Matching Type: {str(i.matching_type)}\n" + description += f"Object Name: {str(i.object_name)}\n" + description += f"Object Extraction Path: {str(i.object_full_path)}\n" + description += f"Object Compilation Date: {str(i.object_compilation_date)}\n" + description += f"Object SHA1: {str(i.object_sha1)}\n" + description += f"CVE: {str(i.cve)}\n" + description += f"CVE Publication Date: {str(i.cve_publication_date)}\n" + description += f"Distribution Package: {str(i.distribution_package)}\n" + description += f"Missing Exploit Mitigations: {str(i.missing_exploit_mitigations)}\n" + description += f"BDSA: {str(i.bdsa)}\n" + description += f"Summary:\n{str(i.summary)}\n" + description += f"Note Type:\n{str(i.note_type)}\n" + description += f"Note Reason:\n{str(i.note_reason)}\n" + description += f"Triage Vectors:\n{str(i.triage_vectors)}\n" + description += f"Unresolving Triage Vectors:\n{str(i.triage_vectors)}\n" return description def format_mitigation(self, i): - mitigation = "Upgrade {} to latest version: {}.\n".format( - str(i.component), - str(i.latest_version) - ) + mitigation = f"Upgrade {str(i.component)} to latest version: {str(i.latest_version)}.\n" return mitigation @@ -171,7 +150,7 @@ def format_impact(self, i): return impact def format_references(self, i): - references = "BDSA: {}\n".format(str(i.bdsa)) - references += "NIST CVE Details: {}\n".format(str(i.vulnerability_url)) + references = f"BDSA: {str(i.bdsa)}\n" + references += f"NIST CVE Details: {str(i.vulnerability_url)}\n" return references diff --git a/dojo/tools/blackduck_component_risk/importer.py b/dojo/tools/blackduck_component_risk/importer.py index c1c26d8dc4e..da1f8c53cc7 100644 --- a/dojo/tools/blackduck_component_risk/importer.py +++ b/dojo/tools/blackduck_component_risk/importer.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class BlackduckCRImporter(object): +class BlackduckCRImporter: """ Importer for blackduck. 
V3 is different in that it creates a Finding in defect dojo for each vulnerable component version used in a project, for each license that is diff --git a/dojo/tools/blackduck_component_risk/parser.py b/dojo/tools/blackduck_component_risk/parser.py index 644d525bcd5..7f6916962b0 100644 --- a/dojo/tools/blackduck_component_risk/parser.py +++ b/dojo/tools/blackduck_component_risk/parser.py @@ -3,7 +3,7 @@ from dojo.models import Finding -class BlackduckComponentRiskParser(object): +class BlackduckComponentRiskParser: """ Can import as exported from Blackduck: - from a zip file containing a security.csv, sources.csv and components.csv diff --git a/dojo/tools/brakeman/parser.py b/dojo/tools/brakeman/parser.py index 77e32603f1a..50d130a13fb 100644 --- a/dojo/tools/brakeman/parser.py +++ b/dojo/tools/brakeman/parser.py @@ -7,7 +7,7 @@ from dojo.models import Finding -class BrakemanParser(object): +class BrakemanParser: def get_scan_types(self): return ["Brakeman Scan"] diff --git a/dojo/tools/bugcrowd/parser.py b/dojo/tools/bugcrowd/parser.py index 941b55fd694..1414e711a1b 100644 --- a/dojo/tools/bugcrowd/parser.py +++ b/dojo/tools/bugcrowd/parser.py @@ -6,7 +6,7 @@ from dojo.models import Endpoint, Finding -class BugCrowdParser(object): +class BugCrowdParser: def get_scan_types(self): return ["BugCrowd Scan"] diff --git a/dojo/tools/bundler_audit/parser.py b/dojo/tools/bundler_audit/parser.py index 8d567af2ec5..a098e6e3e63 100644 --- a/dojo/tools/bundler_audit/parser.py +++ b/dojo/tools/bundler_audit/parser.py @@ -6,7 +6,7 @@ from dojo.models import Finding -class BundlerAuditParser(object): +class BundlerAuditParser: def get_scan_types(self): return ["Bundler-Audit Scan"] diff --git a/dojo/tools/burp/parser.py b/dojo/tools/burp/parser.py index bd599598672..82b24118d7b 100755 --- a/dojo/tools/burp/parser.py +++ b/dojo/tools/burp/parser.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -class BurpParser(object): +class BurpParser: """ The objective of this class is to parse an xml file generated by the burp tool. 
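The other recurring change in the parser files above and below is dropping the explicit object base class. In Python 3 every class is new-style, so class BurpParser: and class BurpParser(object): define identical classes; the inheritance list is pure noise. A quick sketch demonstrating the equivalence (class names are illustrative, not from this codebase):

class WithExplicitBase(object):
    """Old spelling, as removed throughout this diff."""

class WithImplicitBase:
    """New spelling, as added throughout this diff."""

# Both are new-style classes in Python 3 and share the same MRO shape.
assert WithExplicitBase.__mro__ == (WithExplicitBase, object)
assert WithImplicitBase.__mro__ == (WithImplicitBase, object)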
diff --git a/dojo/tools/burp_api/parser.py b/dojo/tools/burp_api/parser.py index 7d62f9b637a..01f30faffba 100644 --- a/dojo/tools/burp_api/parser.py +++ b/dojo/tools/burp_api/parser.py @@ -15,7 +15,7 @@ """ -class BurpApiParser(object): +class BurpApiParser: """Parser that can load data from Burp API""" def get_scan_types(self): diff --git a/dojo/tools/burp_dastardly/parser.py b/dojo/tools/burp_dastardly/parser.py index e546c83978c..70ee436a813 100755 --- a/dojo/tools/burp_dastardly/parser.py +++ b/dojo/tools/burp_dastardly/parser.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) -class BurpDastardlyParser(object): +class BurpDastardlyParser: def get_scan_types(self): return ["Burp Dastardly Scan"] diff --git a/dojo/tools/burp_enterprise/parser.py b/dojo/tools/burp_enterprise/parser.py index b80e0c54b7d..b54603ea995 100644 --- a/dojo/tools/burp_enterprise/parser.py +++ b/dojo/tools/burp_enterprise/parser.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -class BurpEnterpriseParser(object): +class BurpEnterpriseParser: def get_scan_types(self): return ["Burp Enterprise Scan"] diff --git a/dojo/tools/burp_graphql/parser.py b/dojo/tools/burp_graphql/parser.py index 90d91c640c3..34ebfbdbdf6 100644 --- a/dojo/tools/burp_graphql/parser.py +++ b/dojo/tools/burp_graphql/parser.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) -class BurpGraphQLParser(object): +class BurpGraphQLParser: def get_scan_types(self): return ["Burp GraphQL API"] diff --git a/dojo/tools/cargo_audit/parser.py b/dojo/tools/cargo_audit/parser.py index fddf3be36a0..ccff968cc68 100644 --- a/dojo/tools/cargo_audit/parser.py +++ b/dojo/tools/cargo_audit/parser.py @@ -3,7 +3,7 @@ from dojo.models import Finding -class CargoAuditParser(object): +class CargoAuditParser: """ A class that can be used to parse the cargo audit JSON report file """ diff --git a/dojo/tools/checkmarx/parser.py b/dojo/tools/checkmarx/parser.py index 1c2a7220cef..3f67f97a51f 100755 --- a/dojo/tools/checkmarx/parser.py +++ b/dojo/tools/checkmarx/parser.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) -class CheckmarxParser(object): +class CheckmarxParser: def get_scan_types(self): return ["Checkmarx Scan", "Checkmarx Scan detailed"] @@ -68,23 +68,17 @@ def _get_findings_xml(self, filename, test): for result in query.findall("Result"): if categories is not None: - findingdetail = "{}**Category:** {}\n".format( - findingdetail, categories - ) + findingdetail = f"{findingdetail}**Category:** {categories}\n" if language is not None: - findingdetail = "{}**Language:** {}\n".format( - findingdetail, language - ) + findingdetail = f"{findingdetail}**Language:** {language}\n" if language not in language_list: language_list[language] = 1 else: language_list[language] = language_list[language] + 1 if group is not None: - findingdetail = "{}**Group:** {}\n".format( - findingdetail, group - ) + findingdetail = f"{findingdetail}**Group:** {group}\n" if result.get("Status") is not None: findingdetail = "{}**Status:** {}\n".format( @@ -94,9 +88,7 @@ def _get_findings_xml(self, filename, test): deeplink = "[{}]({})".format( result.get("DeepLink"), result.get("DeepLink") ) - findingdetail = "{}**Finding Link:** {}\n".format( - findingdetail, deeplink - ) + findingdetail = f"{findingdetail}**Finding Link:** {deeplink}\n" if self.mode == "detailed": self._process_result_detailed( @@ -154,7 +146,7 @@ def _process_result_file_name_aggregated( title = titleStart false_p = result.get("FalsePositive") sev = result.get("Severity") - aggregateKeys = 
"{}{}{}".format(cwe, sev, sinkFilename) + aggregateKeys = f"{cwe}{sev}{sinkFilename}" state = result.get("state") active = self.isActive(state) verified = self.isVerified(state) @@ -190,16 +182,8 @@ def _process_result_file_name_aggregated( find = dupes[aggregateKeys] find.nb_occurences = find.nb_occurences + 1 if find.nb_occurences == 2: - find.description = "### 1. {}\n{}".format( - find.title, find.description - ) - find.description = "{}\n\n-----\n### {}. {}\n{}\n{}".format( - find.description, - find.nb_occurences, - title, - findingdetail, - description, - ) + find.description = f"### 1. {find.title}\n{find.description}" + find.description = f"{find.description}\n\n-----\n### {find.nb_occurences}. {title}\n{findingdetail}\n{description}" if queryId not in vuln_ids_from_tool[aggregateKeys]: vuln_ids_from_tool[aggregateKeys].append(queryId) # If at least one of the findings in the aggregate is exploitable, @@ -236,12 +220,8 @@ def get_description_file_name_aggregated(self, query, result): sinkFilename, sinkLineNumber, sinkObject = self.get_pathnode_elements( pathnode ) - description = "Source file: {} (line {})\nSource object: {}".format( - sourceFilename, sourceLineNumber, sourceObject - ) - description = "{}\nSink file: {} (line {})\nSink object: {}".format( - description, sinkFilename, sinkLineNumber, sinkObject - ) + description = f"Source file: {sourceFilename} (line {sourceLineNumber})\nSource object: {sourceObject}" + description = f"{description}\nSink file: {sinkFilename} (line {sinkLineNumber})\nSink object: {sinkObject}" return description, pathnode def _process_result_detailed( @@ -273,7 +253,7 @@ def _process_result_detailed( similarityId = str(path.get("SimilarityId")) path_id = str(path.get("PathId")) pathId = similarityId + path_id - findingdetail = "{}-----\n".format(findingdetail) + findingdetail = f"{findingdetail}-----\n" # Loop over function calls / assignments in the data flow graph for pathnode in path.findall("PathNode"): findingdetail = self.get_description_detailed( @@ -294,9 +274,7 @@ def _process_result_detailed( ) = self.get_pathnode_elements(pathnode) # pathId is the unique id from tool which means that there is # basically no aggregation except real duplicates - aggregateKeys = "{}{}{}{}{}".format( - categories, cwe, name, sinkFilename, pathId - ) + aggregateKeys = f"{categories}{cwe}{name}{sinkFilename}{pathId}" if title and sinkFilename: title = "{} ({})".format(title, sinkFilename.split("/")[-1]) @@ -355,7 +333,7 @@ def get_description_detailed(self, pathnode, findingdetail): codefragment.find("Code").text.strip(), ) - findingdetail = "{}-----\n".format(findingdetail) + findingdetail = f"{findingdetail}-----\n" return findingdetail # Get name, cwe and categories from the global query tag (1 query = 1 type diff --git a/dojo/tools/checkmarx_one/parser.py b/dojo/tools/checkmarx_one/parser.py index 699ac64e42a..8769a2220fc 100644 --- a/dojo/tools/checkmarx_one/parser.py +++ b/dojo/tools/checkmarx_one/parser.py @@ -4,7 +4,7 @@ from dojo.models import Finding -class CheckmarxOneParser(object): +class CheckmarxOneParser: def get_scan_types(self): return ["Checkmarx One Scan"] diff --git a/dojo/tools/checkmarx_osa/parser.py b/dojo/tools/checkmarx_osa/parser.py index 30ae18e0f45..c61ce2f8689 100644 --- a/dojo/tools/checkmarx_osa/parser.py +++ b/dojo/tools/checkmarx_osa/parser.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) -class CheckmarxOsaParser(object): +class CheckmarxOsaParser: def get_scan_types(self): return ["Checkmarx OSA"] @@ -51,7 +51,7 @@ 
def get_findings(self, filehandle, test): status = item["state"]["name"] vulnerability_id = item.get("cveName", "NC") finding_item = Finding( - title="{0} {1} | {2}".format( + title="{} {} | {}".format( library["name"], library["version"], vulnerability_id ), severity=item["severity"]["name"], diff --git a/dojo/tools/checkov/parser.py b/dojo/tools/checkov/parser.py index c98e94537fa..ad4878d389e 100644 --- a/dojo/tools/checkov/parser.py +++ b/dojo/tools/checkov/parser.py @@ -3,7 +3,7 @@ from dojo.models import Finding -class CheckovParser(object): +class CheckovParser: def get_scan_types(self): return ["Checkov Scan"] diff --git a/dojo/tools/chefinspect/parser.py b/dojo/tools/chefinspect/parser.py index adf85eb5eaa..30feaa586e3 100644 --- a/dojo/tools/chefinspect/parser.py +++ b/dojo/tools/chefinspect/parser.py @@ -2,7 +2,7 @@ from dojo.models import Finding -class ChefInspectParser(object): +class ChefInspectParser: def get_scan_types(self): return ["Chef Inspect Log"] diff --git a/dojo/tools/clair/clair_parser.py b/dojo/tools/clair/clair_parser.py index 55fc94ad392..381a1b97ede 100644 --- a/dojo/tools/clair/clair_parser.py +++ b/dojo/tools/clair/clair_parser.py @@ -3,7 +3,7 @@ logger = logging.getLogger(__name__) -class ClairScan(object): +class ClairScan: def get_items_clair(self, tree, test): items = {} for node in tree: diff --git a/dojo/tools/clair/clairklar_parser.py b/dojo/tools/clair/clairklar_parser.py index 5a24dbb05f9..263c18872fa 100644 --- a/dojo/tools/clair/clairklar_parser.py +++ b/dojo/tools/clair/clairklar_parser.py @@ -3,7 +3,7 @@ logger = logging.getLogger(__name__) -class ClairKlarScan(object): +class ClairKlarScan: def get_items_klar(self, tree, test): items = list() clair_severities = [ diff --git a/dojo/tools/clair/parser.py b/dojo/tools/clair/parser.py index b0701de287f..269bbcf5a05 100644 --- a/dojo/tools/clair/parser.py +++ b/dojo/tools/clair/parser.py @@ -3,7 +3,7 @@ from dojo.tools.clair.clairklar_parser import ClairKlarScan -class ClairParser(object): +class ClairParser: def get_scan_types(self): return ["Clair Scan"] diff --git a/dojo/tools/cloudsploit/parser.py b/dojo/tools/cloudsploit/parser.py index 38e518fc6ed..b7b7d346e1c 100644 --- a/dojo/tools/cloudsploit/parser.py +++ b/dojo/tools/cloudsploit/parser.py @@ -8,7 +8,7 @@ # from urllib.parse import urlparse -class CloudsploitParser(object): +class CloudsploitParser: """ AquaSecurity CloudSploit https://github.com/aquasecurity/cloudsploit """ diff --git a/dojo/tools/cobalt/parser.py b/dojo/tools/cobalt/parser.py index 172982dd67d..11592d2ab9c 100644 --- a/dojo/tools/cobalt/parser.py +++ b/dojo/tools/cobalt/parser.py @@ -7,7 +7,7 @@ __author__ = "dr3dd589" -class CobaltParser(object): +class CobaltParser: def get_scan_types(self): return ["Cobalt.io Scan"] diff --git a/dojo/tools/codechecker/parser.py b/dojo/tools/codechecker/parser.py index 4866145c02e..f73302e7da0 100644 --- a/dojo/tools/codechecker/parser.py +++ b/dojo/tools/codechecker/parser.py @@ -3,7 +3,7 @@ from dojo.models import Finding -class CodeCheckerParser(object): +class CodeCheckerParser: def get_scan_types(self): return ["Codechecker Report native"] @@ -48,7 +48,7 @@ def get_item(vuln): if "type" in vuln: vuln_type = vuln.get("type", "None") if vuln_type != "None": - description += "Type: {}\n".format(vuln_type) + description += f"Type: {vuln_type}\n" if "message" in vuln: description += "{}\n".format(vuln["message"]) @@ -57,15 +57,13 @@ def get_item(vuln): file_path = location["path"] if "path" in location else None if file_path: - 
description += "File path: {}\n".format(file_path) + description += f"File path: {file_path}\n" line = vuln["line"] if "line" in vuln else None column = vuln["column"] if "column" in vuln else None if line is not None and column is not None: - description += "Location in file: line {}, column {}\n".format( - line, column - ) + description += f"Location in file: line {line}, column {column}\n" sast_source_line = line diff --git a/dojo/tools/contrast/parser.py b/dojo/tools/contrast/parser.py index f689b392dac..e5352606581 100644 --- a/dojo/tools/contrast/parser.py +++ b/dojo/tools/contrast/parser.py @@ -7,7 +7,7 @@ from dojo.models import Endpoint, Finding -class ContrastParser(object): +class ContrastParser: """Contrast Scanner CSV Report""" def get_scan_types(self): @@ -80,7 +80,7 @@ def get_findings(self, filename, test): ) dupe_key = hashlib.sha256( - f"{finding.vuln_id_from_tool}".encode("utf-8") + f"{finding.vuln_id_from_tool}".encode() ).digest() if dupe_key in dupes: diff --git a/dojo/tools/coverity_api/parser.py b/dojo/tools/coverity_api/parser.py index e25f819a8df..38c361e882b 100644 --- a/dojo/tools/coverity_api/parser.py +++ b/dojo/tools/coverity_api/parser.py @@ -4,7 +4,7 @@ from dojo.models import Finding -class CoverityApiParser(object): +class CoverityApiParser: """Parser that can load data from Synopsys Coverity API""" def get_scan_types(self): diff --git a/dojo/tools/crashtest_security/parser.py b/dojo/tools/crashtest_security/parser.py index 0ac2b37c0b4..efe086ed4f1 100755 --- a/dojo/tools/crashtest_security/parser.py +++ b/dojo/tools/crashtest_security/parser.py @@ -8,7 +8,7 @@ from dojo.models import Finding -class CrashtestSecurityJsonParser(object): +class CrashtestSecurityJsonParser: """ The objective of this class is to parse a json file generated by the crashtest security suite. @@ -141,7 +141,7 @@ def get_severity(self, cvss_base_score): return "Critical" -class CrashtestSecurityXmlParser(object): +class CrashtestSecurityXmlParser: """ The objective of this class is to parse an xml file generated by the crashtest security suite. 
@@ -224,7 +224,7 @@ def get_items(self, tree, test): return items -class CrashtestSecurityParser(object): +class CrashtestSecurityParser: """SSLYze support JSON and XML""" def get_scan_types(self): diff --git a/dojo/tools/cred_scan/parser.py b/dojo/tools/cred_scan/parser.py index 2a2e616f44d..9a8ab21e5df 100644 --- a/dojo/tools/cred_scan/parser.py +++ b/dojo/tools/cred_scan/parser.py @@ -4,7 +4,7 @@ from dojo.models import Finding -class CredScanParser(object): +class CredScanParser: """ Credential Scanner (aka CredScan) is a tool developed and maintained by Microsoft to identify credential leaks such as those in source code and diff --git a/dojo/tools/crunch42/parser.py b/dojo/tools/crunch42/parser.py index e1a841e29a3..fade8ddb42e 100644 --- a/dojo/tools/crunch42/parser.py +++ b/dojo/tools/crunch42/parser.py @@ -2,7 +2,7 @@ from dojo.models import Finding -class Crunch42Parser(object): +class Crunch42Parser: def get_scan_types(self): return ["Crunch42 Scan"] diff --git a/dojo/tools/cyclonedx/helpers.py b/dojo/tools/cyclonedx/helpers.py index ed64843c7d2..98a76bfe105 100644 --- a/dojo/tools/cyclonedx/helpers.py +++ b/dojo/tools/cyclonedx/helpers.py @@ -3,7 +3,7 @@ LOGGER = logging.getLogger(__name__) -class Cyclonedxhelper(object): +class Cyclonedxhelper: def _get_cvssv3(self, raw_vector): if raw_vector is None or "" == raw_vector: return None diff --git a/dojo/tools/cyclonedx/json_parser.py b/dojo/tools/cyclonedx/json_parser.py index 1c4b0490b49..9ee31275fc9 100644 --- a/dojo/tools/cyclonedx/json_parser.py +++ b/dojo/tools/cyclonedx/json_parser.py @@ -6,7 +6,7 @@ LOGGER = logging.getLogger(__name__) -class CycloneDXJSONParser(object): +class CycloneDXJSONParser: def _get_findings_json(self, file, test): """Load a CycloneDX file in JSON format""" data = json.load(file) @@ -126,9 +126,7 @@ def _get_findings_json(self, file, test): if detail: finding.mitigation = ( finding.mitigation - + "\n**This vulnerability is mitigated and/or suppressed:** {}\n".format( - detail - ) + + f"\n**This vulnerability is mitigated and/or suppressed:** {detail}\n" ) findings.append(finding) return findings diff --git a/dojo/tools/cyclonedx/parser.py b/dojo/tools/cyclonedx/parser.py index dfb01b8a38e..8fe80a51136 100644 --- a/dojo/tools/cyclonedx/parser.py +++ b/dojo/tools/cyclonedx/parser.py @@ -2,7 +2,7 @@ from dojo.tools.cyclonedx.xml_parser import CycloneDXXMLParser -class CycloneDXParser(object): +class CycloneDXParser: """CycloneDX is a lightweight software bill of materials (SBOM) standard designed for use in application security contexts and supply chain component analysis. 
https://www.cyclonedx.org/ diff --git a/dojo/tools/cyclonedx/xml_parser.py b/dojo/tools/cyclonedx/xml_parser.py index 5e0bda3985b..517dbc54780 100644 --- a/dojo/tools/cyclonedx/xml_parser.py +++ b/dojo/tools/cyclonedx/xml_parser.py @@ -7,7 +7,7 @@ LOGGER = logging.getLogger(__name__) -class CycloneDXXMLParser(object): +class CycloneDXXMLParser: def _get_findings_xml(self, file, test): nscan = ElementTree.parse(file) root = nscan.getroot() @@ -294,9 +294,7 @@ def _manage_vulnerability_xml( if detail: finding.mitigation = ( finding.mitigation - + "\n**This vulnerability is mitigated and/or suppressed:** {}\n".format( - detail - ) + + f"\n**This vulnerability is mitigated and/or suppressed:** {detail}\n" ) findings.append(finding) return findings diff --git a/dojo/tools/dawnscanner/parser.py b/dojo/tools/dawnscanner/parser.py index e191d2da062..98b91d36ad9 100644 --- a/dojo/tools/dawnscanner/parser.py +++ b/dojo/tools/dawnscanner/parser.py @@ -5,7 +5,7 @@ from dojo.models import Finding -class DawnScannerParser(object): +class DawnScannerParser: CVE_REGEX = re.compile(r"CVE-\d{4}-\d{4,7}") def get_scan_types(self): diff --git a/dojo/tools/dependency_check/parser.py b/dojo/tools/dependency_check/parser.py index 89b634d13c6..01ff28e2fd9 100644 --- a/dojo/tools/dependency_check/parser.py +++ b/dojo/tools/dependency_check/parser.py @@ -13,7 +13,7 @@ logger = logging.getLogger(__name__) -class DependencyCheckParser(object): +class DependencyCheckParser: SEVERITY_MAPPING = { "info": "Info", "low": "Low", @@ -232,9 +232,7 @@ def get_finding_from_vulnerability( if component_name is None: logger.warning( - "component_name was None for File: {}, using dependency file name instead.".format( - dependency_filename - ) + f"component_name was None for File: {dependency_filename}, using dependency file name instead." ) component_name = dependency_filename @@ -296,18 +294,14 @@ def get_finding_from_vulnerability( if related_dependency is not None: tags.append("related") - if vulnerability.tag == "{}suppressedVulnerability".format(namespace): + if vulnerability.tag == f"{namespace}suppressedVulnerability": if notes is None: notes = "Document on why we are suppressing this vulnerability is missing!" 
tags.append("no_suppression_document") - mitigation = "**This vulnerability is mitigated and/or suppressed:** {}\n".format( - notes - ) + mitigation = f"**This vulnerability is mitigated and/or suppressed:** {notes}\n" mitigation = ( mitigation - + "Update {}:{} to at least the version recommended in the description".format( - component_name, component_version - ) + + f"Update {component_name}:{component_version} to at least the version recommended in the description" ) mitigated = datetime.utcnow() is_Mitigated = True @@ -315,9 +309,7 @@ def get_finding_from_vulnerability( tags.append("suppressed") else: - mitigation = "Update {}:{} to at least the version recommended in the description".format( - component_name, component_version - ) + mitigation = f"Update {component_name}:{component_version} to at least the version recommended in the description" description += "\n**Filepath:** " + str(dependency_filepath) active = True diff --git a/dojo/tools/dependency_track/parser.py b/dojo/tools/dependency_track/parser.py index 965e3e32362..c4e3dad351d 100644 --- a/dojo/tools/dependency_track/parser.py +++ b/dojo/tools/dependency_track/parser.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) -class DependencyTrackParser(object): +class DependencyTrackParser: """ A class that can be used to parse the JSON Finding Packaging Format (FPF) export from OWASP Dependency Track. @@ -138,8 +138,7 @@ def _convert_dependency_track_finding_to_dojo_finding(self, dependency_track_fin else: version_description = '' - title = "{component_name}:{version_description} affected by: {vuln_id} ({source})"\ - .format(vuln_id=vuln_id, source=source, version_description=version_description, component_name=component_name) + title = f"{component_name}:{version_description} affected by: {vuln_id} ({source})" # We should collect all the vulnerability ids, the FPF format can add additional IDs as aliases # we add these aliases in the vulnerability_id list making sure duplicate findings get correctly deduplicated @@ -168,17 +167,16 @@ def _convert_dependency_track_finding_to_dojo_finding(self, dependency_track_fin # Build the description of the Dojo finding # We already know (from above) that the version information is not always present if component_version is not None: - component_description = "Version {component_version} of the {component_name} component".format(component_version=component_version, component_name=component_name) + component_description = f"Version {component_version} of the {component_name} component" else: - component_description = "The {component_name} component".format(component_name=component_name) + component_description = f"The {component_name} component" vulnerability_description = "You are using a component with a known vulnerability. " \ - "{component_description} is affected by the vulnerability with an id of {vuln_id} as " \ - "identified by {source}." \ - .format(component_description=component_description, vuln_id=vuln_id, source=source) + f"{component_description} is affected by the vulnerability with an id of {vuln_id} as " \ + f"identified by {source}." 
# Append purl info if it is present if 'purl' in dependency_track_finding['component'] and dependency_track_finding['component']['purl'] is not None: component_purl = dependency_track_finding['component']['purl'] - vulnerability_description = vulnerability_description + "\nThe purl of the affected component is: {purl}.".format(purl=component_purl) + vulnerability_description = vulnerability_description + f"\nThe purl of the affected component is: {component_purl}." # there is no file_path in the report, but defect dojo needs it otherwise it skips deduplication: # see https://github.com/DefectDojo/django-DefectDojo/issues/3647 # might be no longer needed in the future, and is not needed if people use the default diff --git a/dojo/tools/detect_secrets/parser.py b/dojo/tools/detect_secrets/parser.py index 0da274ba9f7..3e450a02a1c 100644 --- a/dojo/tools/detect_secrets/parser.py +++ b/dojo/tools/detect_secrets/parser.py @@ -4,7 +4,7 @@ from dojo.models import Finding -class DetectSecretsParser(object): +class DetectSecretsParser: """ A class that can be used to parse the detect-secrets JSON report file """ diff --git a/dojo/tools/dockerbench/parser.py b/dojo/tools/dockerbench/parser.py index 870c3bc31be..a00db912d2c 100644 --- a/dojo/tools/dockerbench/parser.py +++ b/dojo/tools/dockerbench/parser.py @@ -4,7 +4,7 @@ from datetime import datetime -class DockerBenchParser(object): +class DockerBenchParser: def get_scan_types(self): return ["docker-bench-security Scan"] @@ -90,7 +90,7 @@ def get_item(vuln, test, test_start, test_end, description): description += unique_id_from_tool if reason: description += "\n" - description += "desc: {}\n".format(reason) + description += f"desc: {reason}\n" if vuln.get("details"): description += "\n" description += vuln["details"] diff --git a/dojo/tools/dockle/parser.py b/dojo/tools/dockle/parser.py index 5c07472bedd..e2d0be9256f 100644 --- a/dojo/tools/dockle/parser.py +++ b/dojo/tools/dockle/parser.py @@ -3,7 +3,7 @@ from dojo.models import Finding -class DockleParser(object): +class DockleParser: """ A class that can be used to parse the Dockle JSON report files """ diff --git a/dojo/tools/drheader/parser.py b/dojo/tools/drheader/parser.py index eeeed1e5e17..158da541bd3 100644 --- a/dojo/tools/drheader/parser.py +++ b/dojo/tools/drheader/parser.py @@ -3,7 +3,7 @@ from dojo.models import Endpoint, Finding -class DrHeaderParser(object): +class DrHeaderParser: def get_scan_types(self): return ["DrHeader JSON Importer"] diff --git a/dojo/tools/eslint/parser.py b/dojo/tools/eslint/parser.py index c3e2167b8c3..df8628f5330 100644 --- a/dojo/tools/eslint/parser.py +++ b/dojo/tools/eslint/parser.py @@ -3,7 +3,7 @@ from dojo.models import Finding -class ESLintParser(object): +class ESLintParser: def get_scan_types(self): return ["ESLint Scan"] @@ -37,7 +37,7 @@ def get_findings(self, filename, test): for message in item["messages"]: if message["message"] is None: - title = str("Finding Not defined") + title = "Finding Not defined" else: title = str(message["message"]) diff --git a/dojo/tools/fortify/fpr_parser.py b/dojo/tools/fortify/fpr_parser.py index de745d236d6..8110a23cb9c 100644 --- a/dojo/tools/fortify/fpr_parser.py +++ b/dojo/tools/fortify/fpr_parser.py @@ -4,7 +4,7 @@ from dojo.models import Finding -class FortifyFPRParser(object): +class FortifyFPRParser: def parse_fpr(self, filename, test): if str(filename.__class__) == "