Update pre-commit hooks configuration to use ruff instead of black
Chubercik authored and Repiteo committed May 21, 2024
1 parent aaa4560 commit d9f8ef6
Showing 47 changed files with 240 additions and 237 deletions.
10 changes: 5 additions & 5 deletions .pre-commit-config.yaml
@@ -17,12 +17,12 @@ repos:
platform/android/java/lib/src/com/.*
)
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.2.0
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.4
hooks:
- id: black
files: (\.py$|SConstruct|SCsub)
types_or: [text]
- id: ruff
args: [--fix]
- id: ruff-format

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.971
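
Most of the 47 touched files change only because ruff enforces a handful of pycodestyle/pyflakes rules on top of formatting. The following is a minimal, self-contained Python sketch of the recurring fixes seen throughout this diff (illustrative only, not code from the commit; the variable names are made up):

    # E711: compare against None with "is" / "is not", not "==" / "!=".
    scons_cache_path = None
    if scons_cache_path is not None:  # was: scons_cache_path != None
        print(scons_cache_path)

    # E712-style cleanup: rely on truthiness instead of "== True".
    dev_build = False
    if dev_build:  # was: dev_build == True
        print("dev build")

    # E713: "x not in y" instead of "not x in y".
    readers = ["vertex", "fragment"]
    if "compute" not in readers:  # was: not "compute" in readers
        readers.append("compute")
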
24 changes: 12 additions & 12 deletions SConstruct
@@ -10,11 +10,11 @@ import os
import pickle
import sys
import time
from types import ModuleType
from collections import OrderedDict
from importlib.util import spec_from_file_location, module_from_spec
from SCons import __version__ as scons_raw_version
from importlib.util import module_from_spec, spec_from_file_location
from types import ModuleType

from SCons import __version__ as scons_raw_version

# Explicitly resolve the helper modules, this is done to avoid clash with
# modules of the same name that might be randomly added (e.g. someone adding
@@ -53,12 +53,12 @@ _helper_module("core.core_builders", "core/core_builders.py")
_helper_module("main.main_builders", "main/main_builders.py")

# Local
import methods
import glsl_builders
import gles3_builders
import glsl_builders
import methods
import scu_builders
from methods import print_warning, print_error
from platform_methods import architectures, architecture_aliases
from methods import print_error, print_warning
from platform_methods import architecture_aliases, architectures

if ARGUMENTS.get("target", "editor") == "editor":
_helper_module("editor.editor_builders", "editor/editor_builders.py")
@@ -68,7 +68,7 @@ if ARGUMENTS.get("target", "editor") == "editor":
# <https://github.com/python/cpython/issues/73245>
if sys.stdout.isatty() and sys.platform == "win32":
try:
from ctypes import windll, byref, WinError # type: ignore
from ctypes import WinError, byref, windll # type: ignore
from ctypes.wintypes import DWORD # type: ignore

stdout_handle = windll.kernel32.GetStdHandle(DWORD(-11))
@@ -562,15 +562,15 @@ if env["build_profile"] != "":
dbo = ft["disabled_build_options"]
for c in dbo:
env[c] = dbo[c]
except:
except json.JSONDecodeError:
print_error('Failed to open feature build profile: "{}"'.format(env["build_profile"]))
Exit(255)

# Platform specific flags.
# These can sometimes override default options.
flag_list = platform_flags[env["platform"]]
for f in flag_list:
if not (f[0] in ARGUMENTS) or ARGUMENTS[f[0]] == "auto": # Allow command line to override platform flags
if f[0] not in ARGUMENTS or ARGUMENTS[f[0]] == "auto": # Allow command line to override platform flags
env[f[0]] = f[1]

# 'dev_mode' and 'production' are aliases to set default options if they haven't been
@@ -591,7 +591,7 @@ if env["production"]:
# Run SCU file generation script if in a SCU build.
if env["scu_build"]:
max_includes_per_scu = 8
if env.dev_build == True:
if env.dev_build:
max_includes_per_scu = 1024

read_scu_limit = int(env["scu_limit"])
@@ -984,7 +984,7 @@ GLSL_BUILDERS = {
env.Append(BUILDERS=GLSL_BUILDERS)

scons_cache_path = os.environ.get("SCONS_CACHE")
if scons_cache_path != None:
if scons_cache_path is not None:
CacheDir(scons_cache_path)
print("Scons cache enabled... (path: '" + scons_cache_path + "')")

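
The build-profile hunk above also replaces a bare "except:" with "except json.JSONDecodeError:", the pattern ruff flags as E722. A small, self-contained sketch of that pattern (illustrative only; load_profile is a made-up helper, not the SConstruct code):

    import json

    def load_profile(text: str) -> dict:
        try:
            return json.loads(text)
        except json.JSONDecodeError as exc:  # only malformed JSON is handled; other errors still surface
            print(f"Failed to parse feature build profile: {exc}")
            return {}

    print(load_profile('{"disabled_classes": []}'))  # parsed dict
    print(load_profile("not json"))                  # prints the error, returns {}
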
12 changes: 5 additions & 7 deletions core/SCsub
@@ -2,9 +2,11 @@

Import("env")

import os

import core_builders

import methods
import os

env.core_sources = []

@@ -188,9 +190,7 @@ def version_info_builder(target, source, env):
#define VERSION_WEBSITE "{website}"
#define VERSION_DOCS_BRANCH "{docs_branch}"
#define VERSION_DOCS_URL "https://docs.godotengine.org/en/" VERSION_DOCS_BRANCH
""".format(
**env.version_info
)
""".format(**env.version_info)
)


@@ -206,9 +206,7 @@ def version_hash_builder(target, source, env):
const char *const VERSION_HASH = "{git_hash}";
const uint64_t VERSION_TIMESTAMP = {git_timestamp};
""".format(
**env.version_info
)
""".format(**env.version_info)
)


12 changes: 6 additions & 6 deletions core/core_builders.py
@@ -180,7 +180,7 @@ def next_line(self):
return line

def next_tag(self):
if not ":" in self.current:
if ":" not in self.current:
return ("", [])
tag, line = self.current.split(":", 1)
lines = [line.strip()]
@@ -206,7 +206,7 @@ def next_tag(self):

if not tag or not reader.current:
# end of a paragraph start a new part
if "License" in part and not "Files" in part:
if "License" in part and "Files" not in part:
# no Files tag in this one, so assume standalone license
license_list.append(part["License"])
part = {}
@@ -298,13 +298,13 @@ def next_tag(self):
f.write("const int LICENSE_COUNT = " + str(len(license_list)) + ";\n")

f.write("const char *const LICENSE_NAMES[] = {\n")
for l in license_list:
f.write('\t"' + escape_string(l[0]) + '",\n')
for license in license_list:
f.write('\t"' + escape_string(license[0]) + '",\n')
f.write("};\n\n")

f.write("const char *const LICENSE_BODIES[] = {\n\n")
for l in license_list:
for line in l[1:]:
for license in license_list:
for line in license[1:]:
if line == ".":
f.write('\t"\\n"\n')
else:
2 changes: 1 addition & 1 deletion core/extension/SCsub
@@ -2,8 +2,8 @@

Import("env")

import make_wrappers
import make_interface_dumper
import make_wrappers

env.CommandNoCache(["ext_wrappers.gen.inc"], "make_wrappers.py", env.Run(make_wrappers.run))
env.CommandNoCache(
2 changes: 0 additions & 2 deletions core/extension/make_wrappers.py
@@ -10,7 +10,6 @@
def generate_mod_version(argcount, const=False, returns=False):
s = proto_mod
sproto = str(argcount)
method_info = ""
if returns:
sproto += "R"
s = s.replace("$RETTYPE", "m_ret, ")
@@ -68,7 +67,6 @@ def generate_ex_version(argcount, const=False, returns=False):
def generate_ex_version(argcount, const=False, returns=False):
s = proto_ex
sproto = str(argcount)
method_info = ""
if returns:
sproto += "R"
s = s.replace("$RETTYPE", "m_ret, ")
1 change: 0 additions & 1 deletion core/input/SCsub
@@ -4,7 +4,6 @@ Import("env")

import input_builders


# Order matters here. Higher index controller database files write on top of lower index database files.
controller_databases = [
"gamecontrollerdb.txt",
8 changes: 4 additions & 4 deletions doc/tools/doc_status.py
@@ -1,11 +1,11 @@
#!/usr/bin/env python3

import fnmatch
import os
import sys
import re
import math
import os
import platform
import re
import sys
import xml.etree.ElementTree as ET
from typing import Dict, List, Set

@@ -286,7 +286,7 @@ def generate_for_class(c: ET.Element):
status.progresses[tag.tag].increment(is_deprecated or is_experimental or has_descr)
elif tag.tag in ["constants", "members", "theme_items"]:
for sub_tag in list(tag):
if not sub_tag.text is None:
if sub_tag.text is not None:
is_deprecated = "deprecated" in sub_tag.attrib
is_experimental = "experimental" in sub_tag.attrib
has_descr = len(sub_tag.text.strip()) > 0
11 changes: 5 additions & 6 deletions doc/tools/make_rst.py
@@ -4,17 +4,16 @@

import argparse
import os
import platform
import re
import sys
import xml.etree.ElementTree as ET
from collections import OrderedDict
from typing import List, Dict, TextIO, Tuple, Optional, Any, Union
from typing import Any, Dict, List, Optional, TextIO, Tuple, Union

# Import hardcoded version information from version.py
root_directory = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../")
sys.path.append(root_directory) # Include the root directory
import version
import version # noqa: E402

# $DOCS_URL/path/to/page.html(#fragment-tag)
GODOT_DOCS_PATTERN = re.compile(r"^\$DOCS_URL/(.*)\.html(#.*)?$")
@@ -706,7 +705,7 @@ def main() -> None:
# <https://github.com/python/cpython/issues/73245>
if should_color and sys.stdout.isatty() and sys.platform == "win32":
try:
from ctypes import windll, byref, WinError # type: ignore
from ctypes import WinError, byref, windll # type: ignore
from ctypes.wintypes import DWORD # type: ignore

stdout_handle = windll.kernel32.GetStdHandle(DWORD(-11))
@@ -1413,7 +1412,7 @@ def make_rst_class(class_def: ClassDef, state: State, dry_run: bool, output_dir:
operator_anchor = f".. _class_{class_name}_operator_{sanitize_operator_name(m.name, state)}"
for parameter in m.parameters:
operator_anchor += f"_{parameter.type_name.type_name}"
operator_anchor += f":\n\n"
operator_anchor += ":\n\n"
f.write(operator_anchor)

f.write(".. rst-class:: classref-operator\n\n")
@@ -1553,7 +1552,7 @@ def make_method_signature(
out += f":ref:`{op_name}<class_{class_def.name}_{ref_type}_{sanitize_operator_name(definition.name, state)}"
for parameter in definition.parameters:
out += f"_{parameter.type_name.type_name}"
out += f">`"
out += ">`"
elif ref_type == "method":
ref_type_qualifier = ""
if definition.name.startswith("_"):
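
One change in make_rst.py above is not just a re-sort: "import version" gains a "# noqa: E402" marker because it has to stay below the sys.path.append() call that makes version.py importable from the repository root. A minimal illustration of that pattern (assumption: the late "import json" merely stands in for the late "import version"; this is not make_rst.py code):

    import os
    import sys

    # The extra path entry has to exist before the local module can be imported,
    # so the import cannot sit at the top of the file and E402 is silenced instead.
    sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../"))

    import json  # noqa: E402  (stand-in for "import version")

    print(json.dumps({"path_entries": len(sys.path)}))
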
5 changes: 3 additions & 2 deletions editor/SCsub
@@ -4,11 +4,12 @@ Import("env")

env.editor_sources = []

import os
import glob
import os

import editor_builders
import methods

import methods

if env.editor_build:
# Generate doc data paths
1 change: 1 addition & 0 deletions editor/editor_builders.py
@@ -7,6 +7,7 @@
import tempfile
import uuid
import zlib

from methods import print_warning


2 changes: 1 addition & 1 deletion editor/icons/SCsub
@@ -3,8 +3,8 @@
Import("env")

import os
import editor_icons_builders

import editor_icons_builders

env["BUILDERS"]["MakeEditorIconsBuilder"] = Builder(
action=env.Run(editor_icons_builders.make_editor_icons_action),
2 changes: 1 addition & 1 deletion editor/themes/SCsub
@@ -3,8 +3,8 @@
Import("env")

import glob
import editor_theme_builders

import editor_theme_builders

# Fonts
flist = glob.glob(env.Dir("#thirdparty").abspath + "/fonts/*.ttf")
13 changes: 7 additions & 6 deletions gles3_builders.py
@@ -1,9 +1,10 @@
"""Functions used to generate source files during build time"""

import os.path
from methods import print_error
from typing import Optional

from methods import print_error


class GLES3HeaderStruct:
def __init__(self):
@@ -91,11 +92,11 @@ def include_file_in_gles3_header(filename: str, header_data: GLES3HeaderStruct,
includeline = line.replace("#include ", "").strip()[1:-1]

included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
if not included_file in header_data.vertex_included_files and header_data.reading == "vertex":
if included_file not in header_data.vertex_included_files and header_data.reading == "vertex":
header_data.vertex_included_files += [included_file]
if include_file_in_gles3_header(included_file, header_data, depth + 1) is None:
print_error(f'In file "{filename}": #include "{includeline}" could not be found!"')
elif not included_file in header_data.fragment_included_files and header_data.reading == "fragment":
elif included_file not in header_data.fragment_included_files and header_data.reading == "fragment":
header_data.fragment_included_files += [included_file]
if include_file_in_gles3_header(included_file, header_data, depth + 1) is None:
print_error(f'In file "{filename}": #include "{includeline}" could not be found!"')
@@ -121,7 +122,7 @@ def include_file_in_gles3_header(filename: str, header_data: GLES3HeaderStruct,
# unfiorm array
x = x[: x.find("[")]

if not x in header_data.texunit_names:
if x not in header_data.texunit_names:
header_data.texunits += [(x, texunit)]
header_data.texunit_names += [x]

@@ -142,7 +143,7 @@ def include_file_in_gles3_header(filename: str, header_data: GLES3HeaderStruct,
# unfiorm array
x = x[: x.find("[")]

if not x in header_data.ubo_names:
if x not in header_data.ubo_names:
header_data.ubos += [(x, ubo)]
header_data.ubo_names += [x]

@@ -157,7 +158,7 @@ def include_file_in_gles3_header(filename: str, header_data: GLES3HeaderStruct,
# unfiorm array
x = x[: x.find("[")]

if not x in header_data.uniforms:
if x not in header_data.uniforms:
header_data.uniforms += [x]

if (line.strip().find("out ") == 0 or line.strip().find("flat ") == 0) and line.find("tfb:") != -1:
9 changes: 5 additions & 4 deletions glsl_builders.py
@@ -1,8 +1,9 @@
"""Functions used to generate source files during build time"""

import os.path
from typing import Iterable, Optional

from methods import print_error
from typing import Optional, Iterable


def generate_inline_code(input_lines: Iterable[str], insert_newline: bool = True):
@@ -77,15 +78,15 @@ def include_file_in_rd_header(filename: str, header_data: RDHeaderStruct, depth:
else:
included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)

if not included_file in header_data.vertex_included_files and header_data.reading == "vertex":
if included_file not in header_data.vertex_included_files and header_data.reading == "vertex":
header_data.vertex_included_files += [included_file]
if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
print_error(f'In file "{filename}": #include "{includeline}" could not be found!"')
elif not included_file in header_data.fragment_included_files and header_data.reading == "fragment":
elif included_file not in header_data.fragment_included_files and header_data.reading == "fragment":
header_data.fragment_included_files += [included_file]
if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
print_error(f'In file "{filename}": #include "{includeline}" could not be found!"')
elif not included_file in header_data.compute_included_files and header_data.reading == "compute":
elif included_file not in header_data.compute_included_files and header_data.reading == "compute":
header_data.compute_included_files += [included_file]
if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
print_error(f'In file "{filename}": #include "{includeline}" could not be found!"')
[Diff listing truncated; the remaining changed files are not shown.]
