Ruff: add pyupgrade (#9755)
kiblik authored Apr 6, 2024
1 parent ba78a3d commit a69b592
Showing 282 changed files with 874 additions and 1,135 deletions.
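The diff below shows Ruff's pyupgrade rules applied mechanically across the code base: explicit object base classes are dropped, two-argument super(...) calls become the zero-argument form, and %- and str.format()-style formatting is rewritten as f-strings wherever the message fits on one line (longer messages keep .format(), as in report_generate further down). The Ruff configuration change that enables pyupgrade (Ruff's "UP" rule family) is presumably among the 282 changed files, but it is not visible in this excerpt. A minimal before/after sketch of the pattern, using made-up names rather than DefectDojo code:

Before (pre-pyupgrade style):

    class ScanReport(object):                          # explicit object base class
        def __init__(self, scan_type, max_size_mb):
            super(ScanReport, self).__init__()         # two-argument super()
            self.error = "Report for {} is too large. Maximum size is {} MB".format(
                scan_type, max_size_mb
            )

After (what the pyupgrade rules produce):

    class ScanReport:                                  # implicit object base class
        def __init__(self, scan_type, max_size_mb):
            super().__init__()                         # zero-argument super()
            self.error = f"Report for {scan_type} is too large. Maximum size is {max_size_mb} MB"

With the UP rules selected in the project's Ruff configuration, running ruff check --fix produces these rewrites automatically.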
4 changes: 2 additions & 2 deletions dojo/api_v2/mixins.py
@@ -47,11 +47,11 @@ def flatten(elem):
return self.get_paginated_response(serializer.data)


class QuestionSubClassFieldsMixin(object):
class QuestionSubClassFieldsMixin:
def get_queryset(self):
return Question.objects.select_subclasses()


class AnswerSubClassFieldsMixin(object):
class AnswerSubClassFieldsMixin:
def get_queryset(self):
return Answer.objects.select_subclasses()
16 changes: 5 additions & 11 deletions dojo/api_v2/permissions.py
@@ -40,9 +40,7 @@ def check_post_permission(request, post_model, post_pk, post_permission):
if request.method == "POST":
if request.data.get(post_pk) is None:
raise ParseError(
"Unable to check for permissions: Attribute '{}' is required".format(
post_pk
)
f"Unable to check for permissions: Attribute '{post_pk}' is required"
)
object = get_object_or_404(post_model, pk=request.data.get(post_pk))
return user_has_permission(request.user, object, post_permission)
@@ -965,8 +963,7 @@ def raise_no_auto_create_import_validation_error(
if product_name and not product:
if product_type_name:
raise serializers.ValidationError(
"Product '%s' doesn't exist in Product_Type '%s'"
% (product_name, product_type_name)
f"Product '{product_name}' doesn't exist in Product_Type '{product_type_name}'"
)
else:
raise serializers.ValidationError(
@@ -975,21 +972,18 @@ def raise_no_auto_create_import_validation_error(

if engagement_name and not engagement:
raise serializers.ValidationError(
"Engagement '%s' doesn't exist in Product '%s'"
% (engagement_name, product_name)
f"Engagement '{engagement_name}' doesn't exist in Product '{product_name}'"
)

# these are only set for reimport
if test_title:
raise serializers.ValidationError(
"Test '%s' with scan_type '%s' doesn't exist in Engagement '%s'"
% (test_title, scan_type, engagement_name)
f"Test '{test_title}' with scan_type '{scan_type}' doesn't exist in Engagement '{engagement_name}'"
)

if scan_type:
raise serializers.ValidationError(
"Test with scan_type '%s' doesn't exist in Engagement '%s'"
% (scan_type, engagement_name)
f"Test with scan_type '{scan_type}' doesn't exist in Engagement '{engagement_name}'"
)

raise ValidationError(error_message)
33 changes: 11 additions & 22 deletions dojo/api_v2/serializers.py
@@ -245,7 +245,7 @@ def __init__(self, **kwargs):
kwargs["style"] = {"base_template": "textarea.html"}
kwargs["style"].update(style)

super(TagListSerializerField, self).__init__(**kwargs)
super().__init__(**kwargs)

self.pretty_print = pretty_print

@@ -300,14 +300,14 @@ class TaggitSerializer(serializers.Serializer):
def create(self, validated_data):
to_be_tagged, validated_data = self._pop_tags(validated_data)

tag_object = super(TaggitSerializer, self).create(validated_data)
tag_object = super().create(validated_data)

return self._save_tags(tag_object, to_be_tagged)

def update(self, instance, validated_data):
to_be_tagged, validated_data = self._pop_tags(validated_data)

tag_object = super(TaggitSerializer, self).update(
tag_object = super().update(
instance, validated_data
)

@@ -389,7 +389,7 @@ def __init__(self, **kwargs):
if isinstance(data, list):
kwargs["many"] = True

super(RequestResponseSerializerField, self).__init__(**kwargs)
super().__init__(**kwargs)

self.pretty_print = pretty_print

@@ -1464,10 +1464,7 @@ def to_representation(self, data):
new_files.append(
{
"id": file.id,
"file": "{site_url}/{file_access_url}".format(
site_url=settings.SITE_URL,
file_access_url=file.get_accessible_url(test, test.id),
),
"file": f"{settings.SITE_URL}/{file.get_accessible_url(test, test.id)}",
"title": file.title,
}
)
@@ -2306,13 +2303,11 @@ def validate(self, data):
file = data.get("file")
if not file and requires_file(scan_type):
raise serializers.ValidationError(
"Uploading a Report File is required for {}".format(scan_type)
f"Uploading a Report File is required for {scan_type}"
)
if file and is_scan_file_too_large(file):
raise serializers.ValidationError(
"Report file is too large. Maximum supported size is {} MB".format(
settings.SCAN_FILE_MAX_SIZE
)
f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB"
)
tool_type = requires_tool_type(scan_type)
if tool_type:
@@ -2665,13 +2660,11 @@ def validate(self, data):
file = data.get("file")
if not file and requires_file(scan_type):
raise serializers.ValidationError(
"Uploading a Report File is required for {}".format(scan_type)
f"Uploading a Report File is required for {scan_type}"
)
if file and is_scan_file_too_large(file):
raise serializers.ValidationError(
"Report file is too large. Maximum supported size is {} MB".format(
settings.SCAN_FILE_MAX_SIZE
)
f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB"
)
tool_type = requires_tool_type(scan_type)
if tool_type:
@@ -2712,9 +2705,7 @@ def validate(self, data):
file = data.get("file")
if file and is_scan_file_too_large(file):
raise serializers.ValidationError(
"Report file is too large. Maximum supported size is {} MB".format(
settings.SCAN_FILE_MAX_SIZE
)
f"Report file is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB"
)

return data
@@ -2818,9 +2809,7 @@ def save(self):
def validate(self, data):
if is_scan_file_too_large(data["file"]):
raise serializers.ValidationError(
"File is too large. Maximum supported size is {} MB".format(
settings.SCAN_FILE_MAX_SIZE
)
f"File is too large. Maximum supported size is {settings.SCAN_FILE_MAX_SIZE} MB"
)
return data

6 changes: 2 additions & 4 deletions dojo/api_v2/views.py
@@ -1214,9 +1214,7 @@ def remove_tags(self, request, pk=None):
if tag not in all_tags:
return Response(
{
"error": "'{}' is not a valid tag in list".format(
tag
)
"error": f"'{tag}' is not a valid tag in list"
},
status=status.HTTP_400_BAD_REQUEST,
)
@@ -2877,7 +2875,7 @@ def report_generate(request, obj, options):
include_executive_summary = False
include_table_of_contents = False

report_info = "Generated By %s on %s" % (
report_info = "Generated By {} on {}".format(
user.get_full_name(),
(timezone.now().strftime("%m/%d/%Y %I:%M%p %Z")),
)
6 changes: 3 additions & 3 deletions dojo/authorization/authorization.py
@@ -259,7 +259,7 @@ def user_has_global_permission_or_403(user, permission):
def get_roles_for_permission(permission):
if not Permissions.has_value(permission):
raise PermissionDoesNotExistError(
"Permission {} does not exist".format(permission)
f"Permission {permission} does not exist"
)
roles_for_permissions = set()
roles = get_roles_with_permissions()
@@ -274,7 +274,7 @@ def role_has_permission(role, permission):
if role is None:
return False
if not Roles.has_value(role):
raise RoleDoesNotExistError("Role {} does not exist".format(role))
raise RoleDoesNotExistError(f"Role {role} does not exist")
roles = get_roles_with_permissions()
permissions = roles.get(role)
if not permissions:
@@ -286,7 +286,7 @@ def role_has_global_permission(role, permission):
if role is None:
return False
if not Roles.has_value(role):
raise RoleDoesNotExistError("Role {} does not exist".format(role))
raise RoleDoesNotExistError(f"Role {role} does not exist")
roles = get_global_roles_with_permissions()
permissions = roles.get(role)
if permissions and permission in permissions:
2 changes: 1 addition & 1 deletion dojo/celery.py
@@ -20,7 +20,7 @@

@app.task(bind=True)
def debug_task(self):
print(('Request: {0!r}'.format(self.request)))
print(f'Request: {self.request!r}')


@setup_logging.connect
2 changes: 1 addition & 1 deletion dojo/components/sql_group_concat.py
@@ -9,7 +9,7 @@ def __init__(
self, expression, separator, distinct=False, ordering=None, **extra
):
self.separator = separator
super(Sql_GroupConcat, self).__init__(
super().__init__(
expression,
distinct="DISTINCT " if distinct else "",
ordering=" ORDER BY %s" % ordering if ordering is not None else "",
2 changes: 1 addition & 1 deletion dojo/development_environment/views.py
@@ -85,7 +85,7 @@ def edit_dev_env(request, deid):
except RestrictedError as err:
messages.add_message(request,
messages.WARNING,
'Environment cannot be deleted: {}'.format(err),
f'Environment cannot be deleted: {err}',
extra_tags='alert-warning')
return HttpResponseRedirect(reverse('dev_env'))

46 changes: 23 additions & 23 deletions dojo/endpoint/utils.py
@@ -92,7 +92,7 @@ def clean_hosts_run(apps, change):
def err_log(message, html_log, endpoint_html_log, endpoint):
error_suffix = 'It is not possible to migrate it. Delete or edit this endpoint.'
html_log.append({**endpoint_html_log, **{'message': message}})
logger.error('Endpoint (id={}) {}. {}'.format(endpoint.pk, message, error_suffix))
logger.error(f'Endpoint (id={endpoint.pk}) {message}. {error_suffix}')
broken_endpoints.add(endpoint.pk)
html_log = []
broken_endpoints = set()
@@ -120,8 +120,8 @@ def err_log(message, html_log, endpoint_html_log, endpoint):

if parts.protocol:
if endpoint.protocol and (endpoint.protocol != parts.protocol):
message = 'has defined protocol ({}) and it is not the same as protocol in host ' \
'({})'.format(endpoint.protocol, parts.protocol)
message = f'has defined protocol ({endpoint.protocol}) and it is not the same as protocol in host ' \
f'({parts.protocol})'
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
@@ -135,44 +135,44 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
if change:
endpoint.host = parts.host
else:
message = '"{}" use invalid format of host'.format(endpoint.host)
message = f'"{endpoint.host}" use invalid format of host'
err_log(message, html_log, endpoint_html_log, endpoint)

if parts.port:
try:
if (endpoint.port is not None) and (int(endpoint.port) != parts.port):
message = 'has defined port number ({}) and it is not the same as port number in ' \
'host ({})'.format(endpoint.port, parts.port)
message = f'has defined port number ({endpoint.port}) and it is not the same as port number in ' \
f'host ({parts.port})'
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
endpoint.port = parts.port
except ValueError:
message = 'uses non-numeric port: {}'.format(endpoint.port)
message = f'uses non-numeric port: {endpoint.port}'
err_log(message, html_log, endpoint_html_log, endpoint)

if parts.path:
if endpoint.path and (endpoint.path != parts.path):
message = 'has defined path ({}) and it is not the same as path in host ' \
'({})'.format(endpoint.path, parts.path)
message = f'has defined path ({endpoint.path}) and it is not the same as path in host ' \
f'({parts.path})'
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
endpoint.path = parts.path

if parts.query:
if endpoint.query and (endpoint.query != parts.query):
message = 'has defined query ({}) and it is not the same as query in host ' \
'({})'.format(endpoint.query, parts.query)
message = f'has defined query ({endpoint.query}) and it is not the same as query in host ' \
f'({parts.query})'
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
endpoint.query = parts.query

if parts.fragment:
if endpoint.fragment and (endpoint.fragment != parts.fragment):
message = 'has defined fragment ({}) and it is not the same as fragment in host ' \
'({})'.format(endpoint.fragment, parts.fragment)
message = f'has defined fragment ({endpoint.fragment}) and it is not the same as fragment in host ' \
f'({parts.fragment})'
err_log(message, html_log, endpoint_html_log, endpoint)
else:
if change:
Expand All @@ -182,7 +182,7 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
endpoint.save()

except ValidationError:
message = '"{}" uses invalid format of host'.format(endpoint.host)
message = f'"{endpoint.host}" uses invalid format of host'
err_log(message, html_log, endpoint_html_log, endpoint)

try:
@@ -197,8 +197,8 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
err_log('Missing product', html_log, endpoint_html_log, endpoint)

if broken_endpoints:
logger.error('It is not possible to migrate database because there is/are {} broken endpoint(s). '
'Please check logs.'.format(len(broken_endpoints)))
logger.error(f'It is not possible to migrate database because there is/are {len(broken_endpoints)} broken endpoint(s). '
'Please check logs.')
else:
logger.info('There is not broken endpoint.')

@@ -223,8 +223,8 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
to_be_deleted.update(ep_ids[1:])
if change:
message = "Merging Endpoints {} into '{}'".format(
["{} (id={})".format(str(x), x.pk) for x in ep[1:]],
"{} (id={})".format(str(ep[0]), ep[0].pk))
[f"{str(x)} (id={x.pk})" for x in ep[1:]],
f"{str(ep[0])} (id={ep[0].pk})")
html_log.append(message)
logger.info(message)
Endpoint_Status_model.objects\
@@ -240,18 +240,18 @@ def err_log(message, html_log, endpoint_html_log, endpoint):
.filter(finding=eps['finding'])\
.order_by('-last_modified')
message = "Endpoint Statuses {} will be replaced by '{}'".format(
["last_modified: {} (id={})".format(x.last_modified, x.pk) for x in esm[1:]],
"last_modified: {} (id={})".format(esm[0].last_modified, esm[0].pk))
[f"last_modified: {x.last_modified} (id={x.pk})" for x in esm[1:]],
f"last_modified: {esm[0].last_modified} (id={esm[0].pk})")
html_log.append(message)
logger.info(message)
esm.exclude(id=esm[0].pk).delete()

if to_be_deleted:
if change:
message = "Removing endpoints: {}".format(list(to_be_deleted))
message = f"Removing endpoints: {list(to_be_deleted)}"
Endpoint_model.objects.filter(id__in=to_be_deleted).delete()
else:
message = "Redundant endpoints: {}, migration is required.".format(list(to_be_deleted))
message = f"Redundant endpoints: {list(to_be_deleted)}, migration is required."
html_log.append(message)
logger.info(message)

@@ -283,7 +283,7 @@ def validate_endpoints_to_add(endpoints_to_add):
except ValidationError as ves:
for ve in ves:
errors.append(
ValidationError("Invalid endpoint {}: {}".format(endpoint, ve))
ValidationError(f"Invalid endpoint {endpoint}: {ve}")
)
return endpoint_list, errors

(The remaining changed files in this commit are not shown in this excerpt.)
