From 1b40c2b245f6312cdc505e45ae4d3e261803373c Mon Sep 17 00:00:00 2001
From: Ben Cassell <98852248+benc-db@users.noreply.github.com>
Date: Tue, 19 Nov 2024 15:05:07 -0800
Subject: [PATCH] Switching to Ruff for formatting and linting (#847)

---
 .pre-commit-config.yaml                       | 20 ----------
 CHANGELOG.md                                  |  1 +
 black.ini                                     |  4 --
 dbt/adapters/databricks/api_client.py         |  2 -
 dbt/adapters/databricks/connections.py        |  1 -
 dbt/adapters/databricks/impl.py               |  5 +--
 .../python_models/python_submissions.py       |  1 -
 dbt/adapters/databricks/utils.py              | 38 -------------------
 dev-requirements.txt                          |  6 +--
 ruff.toml                                     |  6 +++
 .../adapter/columns/test_get_columns.py       |  1 -
 .../macros/relations/test_table_macros.py     |  5 +--
 tox.ini                                       |  8 ++--
 13 files changed, 17 insertions(+), 81 deletions(-)
 delete mode 100644 .pre-commit-config.yaml
 delete mode 100644 black.ini
 create mode 100644 ruff.toml

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
deleted file mode 100644
index b95733694..000000000
--- a/.pre-commit-config.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-# See https://pre-commit.com for more information
-# See https://pre-commit.com/hooks.html for more hooks
-repos:
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.2.0
-    hooks:
-      - id: trailing-whitespace
-      - id: end-of-file-fixer
-      - id: check-yaml
-      - id: check-added-large-files
-  - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 24.3.0
-    hooks:
-      - id: black
-        language_version: python3.11
-        args: [--config, black.ini]
-  - repo: https://github.com/asottile/reorder-python-imports
-    rev: v3.12.0
-    hooks:
-      - id: reorder-python-imports
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7b1884d48..2a58b1c9f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -31,6 +31,7 @@
 - Prepare for python typing deprecations ([837](https://github.com/databricks/dbt-databricks/pull/837))
 - Fix behavior flag use in init of DatabricksAdapter (thanks @VersusFacit!) ([836](https://github.com/databricks/dbt-databricks/pull/836))
 - Restrict pydantic to V1 per dbt Labs' request ([843](https://github.com/databricks/dbt-databricks/pull/843))
+- Switching to Ruff for formatting and linting ([847](https://github.com/databricks/dbt-databricks/pull/847))
 
 ## dbt-databricks 1.8.7 (October 10, 2024)
 
diff --git a/black.ini b/black.ini
deleted file mode 100644
index c3f6d14f3..000000000
--- a/black.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[tool.black]
-required-version = "24.3.0"
-line-length = 100
-target-version = ['py38']
diff --git a/dbt/adapters/databricks/api_client.py b/dbt/adapters/databricks/api_client.py
index 5ec2d2435..a821ac9ef 100644
--- a/dbt/adapters/databricks/api_client.py
+++ b/dbt/adapters/databricks/api_client.py
@@ -142,7 +142,6 @@ def get_folder(self, _: str, schema: str) -> str:
 
 
 class CurrUserApi(DatabricksApi):
-
     def __init__(self, session: Session, host: str):
         super().__init__(session, host, "/api/2.0/preview/scim/v2")
         self._user = ""
@@ -401,7 +400,6 @@ def get(self, job_id: str) -> dict[str, Any]:
 
 
 class WorkflowJobApi(DatabricksApi):
-
     def __init__(self, session: Session, host: str):
         super().__init__(session, host, "/api/2.1/jobs")
 
diff --git a/dbt/adapters/databricks/connections.py b/dbt/adapters/databricks/connections.py
index 83b55a999..76da3e941 100644
--- a/dbt/adapters/databricks/connections.py
+++ b/dbt/adapters/databricks/connections.py
@@ -868,7 +868,6 @@ def cleanup_all(self) -> None:
     def _update_compute_connection(
         self, conn: DatabricksDBTConnection, new_name: str
     ) -> DatabricksDBTConnection:
-
         if conn.name == new_name and conn.state == ConnectionState.OPEN:
             # Found a connection and nothing to do, so just return it
             return conn
diff --git a/dbt/adapters/databricks/impl.py b/dbt/adapters/databricks/impl.py
index 094b09501..e5b0ca481 100644
--- a/dbt/adapters/databricks/impl.py
+++ b/dbt/adapters/databricks/impl.py
@@ -68,7 +68,6 @@
 from dbt.adapters.databricks.relation_configs.tblproperties import TblPropertiesConfig
 from dbt.adapters.databricks.utils import get_first_row, handle_missing_objects
 from dbt.adapters.databricks.utils import redact_credentials
-from dbt.adapters.databricks.utils import undefined_proof
 from dbt.adapters.relation_configs import RelationResults
 from dbt.adapters.spark.impl import DESCRIBE_TABLE_EXTENDED_MACRO_NAME
 from dbt.adapters.spark.impl import GET_COLUMNS_IN_RELATION_RAW_MACRO_NAME
@@ -165,7 +164,6 @@ def get_identifier_list_string(table_names: set[str]) -> str:
     return _identifier
 
 
-@undefined_proof
 class DatabricksAdapter(SparkAdapter):
     INFORMATION_COMMENT_REGEX = re.compile(r"Comment: (.*)\n[A-Z][A-Za-z ]+:", re.DOTALL)
 
@@ -364,7 +362,8 @@ def _get_hive_relations(
         new_rows: list[tuple[str, Optional[str]]]
         if all([relation.database, relation.schema]):
             tables = self.connections.list_tables(
-                database=relation.database, schema=relation.schema  # type: ignore[arg-type]
+                database=relation.database,  # type: ignore[arg-type]
+                schema=relation.schema,  # type: ignore[arg-type]
             )
 
             new_rows = []
diff --git a/dbt/adapters/databricks/python_models/python_submissions.py b/dbt/adapters/databricks/python_models/python_submissions.py
index a28a15619..426b9a777 100644
--- a/dbt/adapters/databricks/python_models/python_submissions.py
+++ b/dbt/adapters/databricks/python_models/python_submissions.py
@@ -211,7 +211,6 @@ def __init__(
         self.additional_job_settings = parsed_model.config.python_job_config.dict()
 
     def compile(self, path: str) -> PythonJobDetails:
-
         job_spec: dict[str, Any] = {
             "task_key": "inner_notebook",
"notebook_task": { diff --git a/dbt/adapters/databricks/utils.py b/dbt/adapters/databricks/utils.py index 2fbf73115..a48356afc 100644 --- a/dbt/adapters/databricks/utils.py +++ b/dbt/adapters/databricks/utils.py @@ -1,6 +1,4 @@ from collections.abc import Callable -import functools -import inspect import re from typing import Any from typing import TYPE_CHECKING @@ -45,42 +43,6 @@ def remove_undefined(v: Any) -> Any: return None if isinstance(v, Undefined) else v -def undefined_proof(cls: type[A]) -> type[A]: - for name in cls._available_: - func = getattr(cls, name) - if not callable(func): - continue - try: - static_attr = inspect.getattr_static(cls, name) - isstatic = isinstance(static_attr, staticmethod) - isclass = isinstance(static_attr, classmethod) - except AttributeError: - isstatic = False - isclass = False - wrapped_function = _wrap_function(func.__func__ if isclass else func) - setattr( - cls, - name, - ( - staticmethod(wrapped_function) - if isstatic - else classmethod(wrapped_function) if isclass else wrapped_function - ), - ) - - return cls - - -def _wrap_function(func: Callable) -> Callable: - @functools.wraps(func) - def wrapper(*args: Any, **kwargs: Any) -> Any: - new_args = [remove_undefined(arg) for arg in args] - new_kwargs = {key: remove_undefined(value) for key, value in kwargs.items()} - return func(*new_args, **new_kwargs) - - return wrapper - - def remove_ansi(line: str) -> str: ansi_escape = re.compile(r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]") return ansi_escape.sub("", line) diff --git a/dev-requirements.txt b/dev-requirements.txt index 5ac3264ee..d72231626 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,6 +1,5 @@ -black~=24.3.0 -flake8 -flaky +ruff + freezegun~=1.5.0 ipdb mock>=1.3.0 @@ -13,6 +12,5 @@ pytz tox>=3.2.0 types-requests types-mock -pre-commit dbt-tests-adapter>=1.10.2, <2.0 diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 000000000..a544fb732 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,6 @@ +line-length = 100 +target-version = 'py39' + +[lint] +select = ["E", "W", "F"] +ignore = ["E203"] diff --git a/tests/functional/adapter/columns/test_get_columns.py b/tests/functional/adapter/columns/test_get_columns.py index 04e73a663..383dd2c6a 100644 --- a/tests/functional/adapter/columns/test_get_columns.py +++ b/tests/functional/adapter/columns/test_get_columns.py @@ -20,7 +20,6 @@ def setup(self, project): @pytest.fixture(scope="class") def expected_columns(self): - return [ DatabricksColumn( column="struct_col", diff --git a/tests/unit/macros/relations/test_table_macros.py b/tests/unit/macros/relations/test_table_macros.py index 61e49d418..9c5635131 100644 --- a/tests/unit/macros/relations/test_table_macros.py +++ b/tests/unit/macros/relations/test_table_macros.py @@ -29,9 +29,8 @@ def context(self, template) -> dict: def render_create_table_as(self, template_bundle, temporary=False, sql="select 1"): external_path = f"/mnt/root/{template_bundle.relation.identifier}" - template_bundle.template.globals["adapter"].compute_external_path.return_value = ( - external_path - ) + adapter_mock = template_bundle.template.globals["adapter"] + adapter_mock.compute_external_path.return_value = external_path return self.run_macro( template_bundle.template, "databricks__create_table_as", diff --git a/tox.ini b/tox.ini index 17fcde6de..e0ba45ead 100644 --- a/tox.ini +++ b/tox.ini @@ -5,8 +5,8 @@ envlist = linter, unit [testenv:linter] basepython = python3 commands = - {envpython} -m black --config black.ini --check dbt tests - {envpython} 
-m flake8 --select=E,W,F --ignore=E203,W503 --max-line-length=100 dbt tests + {envpython} -m ruff format --check + {envpython} -m ruff check {envpython} -m mypy --config-file mypy.ini --explicit-package-bases dbt tests passenv = DBT_* @@ -15,9 +15,9 @@ deps = -r{toxinidir}/dev-requirements.txt -r{toxinidir}/requirements.txt -[testenv:black] +[testenv:format] basepython = python3 -commands = {envpython} -m black --config black.ini . +commands = {envpython} -m ruff format passenv = DBT_* PYTEST_ADDOPTS
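
Note for contributors: with this patch applied, both formatting and linting go through Ruff. A rough sketch of the equivalent local commands (environment names taken from the tox.ini hunk above; running from the repository root, where ruff.toml is picked up, is assumed):

    # the checks wired into the tox linter env: ruff format --check, ruff check, mypy
    tox -e linter

    # or invoke ruff directly; these subcommands replace `black --check` and `flake8`
    ruff format --check .
    ruff check .

    # rewrite files in place; the `format` env replaces the old `black` env
    tox -e format

Because the [lint] table in ruff.toml selects the same E/W/F rule families the old flake8 invocation did (and ignores E203), the switch should not change which violations are reported, only which tool reports them.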