From a9f1e14ade33a14ecb1d08c1c38d9c01d86f2f75 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Fri, 19 Apr 2024 12:42:43 -0400 Subject: [PATCH] Add and configure `pre-commit` (#174) --- .../Under the Hood-20240417-192843.yaml | 6 + .github/ISSUE_TEMPLATE/internal-epic.yml | 6 +- .../internal-feature-implementation.yml | 6 +- .github/actions/setup-hatch/action.yml | 8 ++ .github/workflows/code-quality.yml | 14 +-- .github/workflows/github-release.yml | 2 +- .github/workflows/release_prep_hatch.yml | 2 +- .gitignore | 2 +- .pre-commit-config.yaml | 112 +++++++++--------- CONTRIBUTING.md | 2 +- .../tests/adapter/hooks/test_model_hooks.py | 1 + .../unit_testing/test_invalid_input.py | 14 ++- dbt/adapters/__init__.py | 1 + dbt/adapters/base/connections.py | 4 +- dbt/adapters/base/impl.py | 17 +-- dbt/adapters/contracts/relation.py | 6 +- dbt/adapters/events/README.md | 2 +- dbt/adapters/factory.py | 15 +-- dbt/adapters/protocol.py | 78 ++++-------- dbt/adapters/relation_configs/README.md | 2 +- .../relation_configs/config_change.py | 4 +- .../macros/materializations/tests/helpers.sql | 2 +- .../macros/unit_test_sql/get_fixture_sql.sql | 2 +- pyproject.toml | 60 +--------- 24 files changed, 152 insertions(+), 216 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20240417-192843.yaml diff --git a/.changes/unreleased/Under the Hood-20240417-192843.yaml b/.changes/unreleased/Under the Hood-20240417-192843.yaml new file mode 100644 index 00000000..94ec9292 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240417-192843.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Validate that dbt-core and dbt-adapters remain de-coupled +time: 2024-04-17T19:28:43.400023-04:00 +custom: + Author: mikealfare + Issue: "144" diff --git a/.github/ISSUE_TEMPLATE/internal-epic.yml b/.github/ISSUE_TEMPLATE/internal-epic.yml index 2f3bfaa7..8cfb3aef 100644 --- a/.github/ISSUE_TEMPLATE/internal-epic.yml +++ b/.github/ISSUE_TEMPLATE/internal-epic.yml @@ -30,7 +30,7 @@ body: label: Objectives description: | What are the high level goals we are trying to achieve? Provide use cases if available. - + Example: - [ ] Allow adapter maintainers to support custom materializations - [ ] Reduce maintenance burden for incremental users by offering materialized views @@ -48,7 +48,7 @@ body: Provide a list of GH issues that will build out this functionality. This may start empty, or as a checklist of items. However, it should eventually become a list of Feature Implementation tickets. - + Example: - [ ] Create new macro to select warehouse - [ ] https://github.com/dbt-labs/dbt-adapters/issues/42 @@ -66,7 +66,7 @@ body: Provide a list of relevant documentation. Is there a proof of concept? Does this require and RFCs, ADRs, etc.? If the documentation exists, link it; if it does not exist yet, reference it descriptively. - + Example: - [ ] RFC for updating connection interface to accept new parameters - [ ] POC: https://github.com/dbt-labs/dbt-adapters/pull/42 diff --git a/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml b/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml index ab3c4ffc..7a99365b 100644 --- a/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml +++ b/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml @@ -41,7 +41,7 @@ body: label: Acceptance criteria description: | What is the definition of done for this feature? Include any relevant edge cases and/or test cases. 
- + Example: - [ ] If there are no config changes, don't alter the materialized view - [ ] If the materialized view is scheduled to refresh, a manual refresh should not be issued @@ -58,7 +58,7 @@ body: description: | Provide scenarios to test. Include both positive and negative tests if possible. Link to existing similar tests if appropriate. - + Example: - [ ] Test with no `materialized` field in the model config. Expect pass. - [ ] Test with a `materialized` field in the model config that is not valid. Expect ConfigError. @@ -68,7 +68,7 @@ body: ``` validations: required: true - + - type: textarea attributes: label: Security diff --git a/.github/actions/setup-hatch/action.yml b/.github/actions/setup-hatch/action.yml index 7b7780ef..6b15cdbf 100644 --- a/.github/actions/setup-hatch/action.yml +++ b/.github/actions/setup-hatch/action.yml @@ -18,5 +18,13 @@ runs: python-version: ${{ inputs.python-version }} - name: Install dev dependencies + shell: bash run: ${{ inputs.setup-command }} + + - name: Add brew to the PATH + shell: bash + run: echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH + + - name: Install pre-commit shell: bash + run: brew install pre-commit diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index 4f5b392e..9c203847 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -10,17 +10,13 @@ on: permissions: read-all -defaults: - run: - shell: bash - # will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} cancel-in-progress: true jobs: - lint: + code-quality: name: Code Quality runs-on: ubuntu-latest @@ -33,8 +29,6 @@ jobs: - name: Setup `hatch` uses: ./.github/actions/setup-hatch - - name: Run linters - run: hatch run lint:all - - - name: Run typechecks - run: hatch run typecheck:all + - name: Run code quality + shell: bash + run: hatch run code-quality diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml index fd20d9ab..1c2f41b5 100644 --- a/.github/workflows/github-release.yml +++ b/.github/workflows/github-release.yml @@ -256,4 +256,4 @@ jobs: RELEASE_NOTES: ${{ inputs.changelog_path }} COMMIT: ${{ inputs.sha }} PRERELEASE: ${{ steps.release_type.outputs.prerelease }} - DRAFT: ${{ steps.draft.outputs.draft }} \ No newline at end of file + DRAFT: ${{ steps.draft.outputs.draft }} diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml index 32a267e0..b043e19e 100644 --- a/.github/workflows/release_prep_hatch.yml +++ b/.github/workflows/release_prep_hatch.yml @@ -539,4 +539,4 @@ jobs: - name: "Remove Temp Branch - ${{ needs.create-temp-branch.outputs.branch_name }}" if: ${{ inputs.deploy_to == 'prod' && inputs.nightly_release == 'false' && needs.create-temp-branch.outputs.branch_name != '' }} run: | - git push origin -d ${{ needs.create-temp-branch.outputs.branch_name }} \ No newline at end of file + git push origin -d ${{ needs.create-temp-branch.outputs.branch_name }} diff --git a/.gitignore b/.gitignore index cf98fcf8..a14d6d0d 100644 --- a/.gitignore +++ b/.gitignore @@ -153,4 +153,4 @@ dmypy.json cython_debug/ # PyCharm -.idea/ \ No newline at end of file +.idea/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3d80b955..caf34209 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,63 +1,57 @@ -# For more on configuring pre-commit hooks (see https://pre-commit.com/) - -# Force all unspecified python hooks to run python 3.8 default_language_version: - python: python3 + python: python3 repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 - hooks: - - id: check-yaml - args: [--unsafe] - - id: check-json - - id: end-of-file-fixer - - id: trailing-whitespace - - id: check-case-conflict -- repo: https://github.com/psf/black - rev: 23.1.0 - hooks: - - id: black - additional_dependencies: ['click~=8.1'] - args: - - "--line-length=99" - - "--target-version=py38" - - id: black - alias: black-check - stages: [manual] - additional_dependencies: ['click~=8.1'] - args: - - "--line-length=99" - - "--target-version=py38" - - "--check" - - "--diff" -- repo: https://github.com/pycqa/flake8 - rev: 6.0.0 - hooks: - - id: flake8 - - id: flake8 - alias: flake8-check - stages: [manual] -- repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.1.1 - hooks: - - id: mypy - # N.B.: Mypy is... a bit fragile. - # - # By using `language: system` we run this hook in the local - # environment instead of a pre-commit isolated one. This is needed - # to ensure mypy correctly parses the project. +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: check-yaml + args: [--unsafe] + - id: check-json + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict + +- repo: https://github.com/dbt-labs/pre-commit-hooks + rev: v0.1.0a1 + hooks: + - id: dbt-core-in-adapters-check + +- repo: https://github.com/psf/black + rev: 24.4.0 + hooks: + - id: black + args: + - --line-length=99 + - --target-version=py38 + - --target-version=py39 + - --target-version=py310 + - --target-version=py311 + - --force-exclude=dbt/adapters/events/adapter_types_pb2.py + +- repo: https://github.com/pycqa/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + exclude: dbt/adapters/events/adapter_types_pb2.py|tests/functional/ + args: + - --max-line-length=99 + - --select=E,F,W + - --ignore=E203,E501,E704,E741,W503,W504 + - --per-file-ignores=*/__init__.py:F401 - # It may cause trouble in that it adds environmental variables out - # of our control to the mix. Unfortunately, there's nothing we can - # do about per pre-commit's author. - # See https://github.com/pre-commit/pre-commit/issues/730 for details. - args: [--show-error-codes, --ignore-missing-imports, --explicit-package-bases] - files: ^dbt/adapters/.* - language: system - - id: mypy - alias: mypy-check - stages: [manual] - args: [--show-error-codes, --pretty, --ignore-missing-imports, --explicit-package-bases] - files: ^dbt/adapters - language: system +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.9.0 + hooks: + - id: mypy + exclude: dbt/adapters/events/adapter_types_pb2.py|dbt-tests-adapter/dbt/__init__.py + args: + - --explicit-package-bases + - --ignore-missing-imports + - --pretty + - --show-error-codes + files: ^dbt/adapters/ + additional_dependencies: + - types-PyYAML + - types-protobuf + - types-pytz diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index eb0002fa..e1b87103 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -163,7 +163,7 @@ Remember to commit and push the file that's created. 
### Signing the CLA -> **_NOTE:_** All contributors to `dbt-adapter` must sign the +> **_NOTE:_** All contributors to `dbt-adapter` must sign the > [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements)(CLA). Maintainers will be unable to merge contributions until the contributor signs the CLA. diff --git a/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py b/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py index 6a544af0..8423c9ca 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py +++ b/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py @@ -1,6 +1,7 @@ from pathlib import Path from dbt_common.exceptions import CompilationError + # TODO: does this belong in dbt-tests-adapter? from dbt.exceptions import ParsingError import pytest diff --git a/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py b/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py index 6c41ceb9..c5bf2a09 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py +++ b/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py @@ -46,16 +46,22 @@ def models(self): def test_invalid_input(self, project): results = run_dbt(["run"]) assert len(results) == 2 - + _, out = run_dbt_and_capture( ["test", "--select", "test_name:test_invalid_input_column_name"], expect_pass=False ) - assert "Invalid column name: 'invalid_column_name' in unit test fixture for 'my_upstream_model'." in out - + assert ( + "Invalid column name: 'invalid_column_name' in unit test fixture for 'my_upstream_model'." + in out + ) + _, out = run_dbt_and_capture( ["test", "--select", "test_name:test_invalid_expect_column_name"], expect_pass=False ) - assert "Invalid column name: 'invalid_column_name' in unit test fixture for expected output." in out + assert ( + "Invalid column name: 'invalid_column_name' in unit test fixture for expected output." + in out + ) class TestPostgresUnitTestInvalidInput(BaseUnitTestInvalidInput): diff --git a/dbt/adapters/__init__.py b/dbt/adapters/__init__.py index 65e1d483..1713e032 100644 --- a/dbt/adapters/__init__.py +++ b/dbt/adapters/__init__.py @@ -2,6 +2,7 @@ This adds all subdirectories of directories on `sys.path` to this package’s `__path__` . It effectively combines all adapters into a single namespace (dbt.adapter). 
""" + from pkgutil import extend_path __path__ = extend_path(__path__, __name__) diff --git a/dbt/adapters/base/connections.py b/dbt/adapters/base/connections.py index a3a4d98d..6e038297 100644 --- a/dbt/adapters/base/connections.py +++ b/dbt/adapters/base/connections.py @@ -165,7 +165,9 @@ def set_connection_name(self, name: Optional[str] = None) -> Connection: conn.handle = LazyHandle(self.open) # Add the connection to thread_connections for this thread self.set_thread_connection(conn) - fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())) + fire_event( + NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info()) + ) else: # existing connection either wasn't open or didn't have the right name if conn.state != "open": conn.handle = LazyHandle(self.open) diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py index 3abe5e09..f58f8aba 100644 --- a/dbt/adapters/base/impl.py +++ b/dbt/adapters/base/impl.py @@ -1328,14 +1328,16 @@ def calculate_freshness_from_metadata_batch( # Track schema, identifiers of sources for lookup from batch query schema_identifier_to_source = { ( - source.path.get_lowered_part(ComponentName.Schema), - source.path.get_lowered_part(ComponentName.Identifier), + source.path.get_lowered_part(ComponentName.Schema), # type: ignore + source.path.get_lowered_part(ComponentName.Identifier), # type: ignore ): source for source in sources } # Group metadata sources by information schema -- one query per information schema will be necessary - sources_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = self._get_catalog_relations_by_info_schema(sources) + sources_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = ( + self._get_catalog_relations_by_info_schema(sources) + ) freshness_responses: Dict[BaseRelation, FreshnessResponse] = {} adapter_responses: List[Optional[AdapterResponse]] = [] @@ -1393,7 +1395,9 @@ def _create_freshness_response( return freshness - def _parse_freshness_row(self, row: "agate.Row", table: "agate.Table") -> Tuple[Any, FreshnessResponse]: + def _parse_freshness_row( + self, row: "agate.Row", table: "agate.Table" + ) -> Tuple[Any, FreshnessResponse]: from dbt_common.clients.agate_helper import get_column_value_uncased try: @@ -1404,10 +1408,7 @@ def _parse_freshness_row(self, row: "agate.Row", table: "agate.Table") -> Tuple[ except Exception: raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table) - freshness_response = self._create_freshness_response( - last_modified_val, - snapshotted_at_val - ) + freshness_response = self._create_freshness_response(last_modified_val, snapshotted_at_val) raw_relation = schema.lower().strip(), identifier.lower().strip() return raw_relation, freshness_response diff --git a/dbt/adapters/contracts/relation.py b/dbt/adapters/contracts/relation.py index 6a88d074..3560c2b3 100644 --- a/dbt/adapters/contracts/relation.py +++ b/dbt/adapters/contracts/relation.py @@ -40,11 +40,9 @@ class MaterializationConfig(Mapping, ABC): contract: MaterializationContract extra: Dict[str, Any] - def __contains__(self, item): - ... + def __contains__(self, item): ... - def __delitem__(self, key): - ... + def __delitem__(self, key): ... class RelationConfig(Protocol): diff --git a/dbt/adapters/events/README.md b/dbt/adapters/events/README.md index fe39a18e..c98488db 100644 --- a/dbt/adapters/events/README.md +++ b/dbt/adapters/events/README.md @@ -14,7 +14,7 @@ When events are processed via `fire_event`, nearly everything is logged. 
Whether We have switched from using betterproto to using google protobuf, because of a lack of support for Struct fields in betterproto. -The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters. +The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters. ## Required for Every Event diff --git a/dbt/adapters/factory.py b/dbt/adapters/factory.py index e63d43e1..b1854f67 100644 --- a/dbt/adapters/factory.py +++ b/dbt/adapters/factory.py @@ -101,17 +101,14 @@ def register_adapter( self, config: AdapterRequiredConfig, mp_context: SpawnContext, - adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO + adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO, ) -> None: adapter_name = config.credentials.type adapter_type = self.get_adapter_class_by_name(adapter_name) adapter_version = self._adapter_version(adapter_name) fire_event( - AdapterRegistered( - adapter_name=adapter_name, - adapter_version=adapter_version - ), - level=adapter_registered_log_level + AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version), + level=adapter_registered_log_level, ) with self.lock: if adapter_name in self.adapters: @@ -199,9 +196,9 @@ def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]: def register_adapter( - config: AdapterRequiredConfig, - mp_context: SpawnContext, - adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO + config: AdapterRequiredConfig, + mp_context: SpawnContext, + adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO, ) -> None: FACTORY.register_adapter(config, mp_context, adapter_registered_log_level) diff --git a/dbt/adapters/protocol.py b/dbt/adapters/protocol.py index bbfdd330..35219866 100644 --- a/dbt/adapters/protocol.py +++ b/dbt/adapters/protocol.py @@ -47,8 +47,7 @@ class ColumnProtocol(Protocol): class RelationProtocol(Protocol): @classmethod - def get_default_quote_policy(cls) -> Policy: - ... + def get_default_quote_policy(cls) -> Policy: ... @classmethod def create_from( @@ -56,8 +55,7 @@ def create_from( quoting: HasQuoting, relation_config: RelationConfig, **kwargs: Any, - ) -> Self: - ... + ) -> Self: ... AdapterConfig_T = TypeVar("AdapterConfig_T", bound=AdapterConfig) @@ -73,8 +71,7 @@ def __call__( config: AdapterRequiredConfig, macro_resolver: MacroResolverProtocol, package_name: Optional[str], - ) -> Dict[str, Any]: - ... + ) -> Dict[str, Any]: ... # TODO CT-211 @@ -96,81 +93,58 @@ class AdapterProtocol( # type: ignore[misc] ConnectionManager: Type[ConnectionManager_T] connections: ConnectionManager_T - def __init__(self, config: AdapterRequiredConfig) -> None: - ... + def __init__(self, config: AdapterRequiredConfig) -> None: ... 
- def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None: - ... + def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None: ... - def get_macro_resolver(self) -> Optional[MacroResolverProtocol]: - ... + def get_macro_resolver(self) -> Optional[MacroResolverProtocol]: ... - def clear_macro_resolver(self) -> None: - ... + def clear_macro_resolver(self) -> None: ... def set_macro_context_generator( self, macro_context_generator: MacroContextGeneratorCallable, - ) -> None: - ... + ) -> None: ... @classmethod def type(cls) -> str: pass - def set_query_header(self, query_header_context: Dict[str, Any]) -> None: - ... + def set_query_header(self, query_header_context: Dict[str, Any]) -> None: ... @staticmethod - def get_thread_identifier() -> Hashable: - ... + def get_thread_identifier() -> Hashable: ... - def get_thread_connection(self) -> Connection: - ... + def get_thread_connection(self) -> Connection: ... - def set_thread_connection(self, conn: Connection) -> None: - ... + def set_thread_connection(self, conn: Connection) -> None: ... - def get_if_exists(self) -> Optional[Connection]: - ... + def get_if_exists(self) -> Optional[Connection]: ... - def clear_thread_connection(self) -> None: - ... + def clear_thread_connection(self) -> None: ... - def clear_transaction(self) -> None: - ... + def clear_transaction(self) -> None: ... - def exception_handler(self, sql: str) -> ContextManager: - ... + def exception_handler(self, sql: str) -> ContextManager: ... - def set_connection_name(self, name: Optional[str] = None) -> Connection: - ... + def set_connection_name(self, name: Optional[str] = None) -> Connection: ... - def cancel_open(self) -> Optional[List[str]]: - ... + def cancel_open(self) -> Optional[List[str]]: ... - def open(cls, connection: Connection) -> Connection: - ... + def open(cls, connection: Connection) -> Connection: ... - def release(self) -> None: - ... + def release(self) -> None: ... - def cleanup_all(self) -> None: - ... + def cleanup_all(self) -> None: ... - def begin(self) -> None: - ... + def begin(self) -> None: ... - def commit(self) -> None: - ... + def commit(self) -> None: ... - def close(cls, connection: Connection) -> Connection: - ... + def close(cls, connection: Connection) -> Connection: ... - def commit_if_has_connection(self) -> None: - ... + def commit_if_has_connection(self) -> None: ... def execute( self, sql: str, auto_begin: bool = False, fetch: bool = False - ) -> Tuple[AdapterResponse, "agate.Table"]: - ... + ) -> Tuple[AdapterResponse, "agate.Table"]: ... diff --git a/dbt/adapters/relation_configs/README.md b/dbt/adapters/relation_configs/README.md index 6be3bc59..22d6bf78 100644 --- a/dbt/adapters/relation_configs/README.md +++ b/dbt/adapters/relation_configs/README.md @@ -1,6 +1,6 @@ # RelationConfig This package serves as an initial abstraction for managing the inspection of existing relations and determining -changes on those relations. It arose from the materialized view work and is currently only supporting +changes on those relations. It arose from the materialized view work and is currently only supporting materialized views for Postgres and Redshift as well as dynamic tables for Snowflake. There are three main classes in this package. 
diff --git a/dbt/adapters/relation_configs/config_change.py b/dbt/adapters/relation_configs/config_change.py index 9d3c8e01..a776dc6b 100644 --- a/dbt/adapters/relation_configs/config_change.py +++ b/dbt/adapters/relation_configs/config_change.py @@ -16,7 +16,9 @@ class RelationConfigChangeAction(StrEnum): @dataclass(frozen=True, eq=True, unsafe_hash=True) class RelationConfigChange(RelationConfigBase, ABC): action: RelationConfigChangeAction - context: Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited + context: ( + Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited + ) @property @abstractmethod diff --git a/dbt/include/global_project/macros/materializations/tests/helpers.sql b/dbt/include/global_project/macros/materializations/tests/helpers.sql index ead727d9..a385d1ea 100644 --- a/dbt/include/global_project/macros/materializations/tests/helpers.sql +++ b/dbt/include/global_project/macros/materializations/tests/helpers.sql @@ -41,4 +41,4 @@ dbt_internal_unit_test_expected as ( select * from dbt_internal_unit_test_actual union all select * from dbt_internal_unit_test_expected -{%- endmacro %} \ No newline at end of file +{%- endmacro %} diff --git a/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql b/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql index ca39a39c..53d7a93b 100644 --- a/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql +++ b/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql @@ -79,7 +79,7 @@ union all {%- endif -%} {%- set column_type = column_name_to_data_types[column_name] %} - + {#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#} {%- set column_value_clean = column_value -%} {%- if column_value is string -%} diff --git a/pyproject.toml b/pyproject.toml index 74c2374e..b08a8159 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,35 +54,16 @@ include = ["dbt/adapters", "dbt/include", "dbt/__init__.py"] [tool.hatch.envs.default] dependencies = [ "dbt_common @ git+https://github.com/dbt-labs/dbt-common.git", + 'pre-commit==3.7.0;python_version>="3.9"', + 'pre-commit==3.5.0;python_version=="3.8"', ] - -[tool.hatch.envs.lint] -detached = true -dependencies = [ - "black>=24.3", - "flake8", - "Flake8-pyproject", -] -[tool.hatch.envs.lint.scripts] -all = [ - "- black-only", - "- flake8-only", -] -black-only = "python -m black ." -flake8-only = "python -m flake8 ." - -[tool.hatch.envs.typecheck] -dependencies = [ - "mypy", - "types-PyYAML", - "types-protobuf", - "types-pytz", -] -[tool.hatch.envs.typecheck.scripts] -all = "python -m mypy ." 
+[tool.hatch.envs.default.scripts] +dev = "pre-commit install" +code-quality = "pre-commit run --all-files" [tool.hatch.envs.unit-tests] dependencies = [ + "dbt_common @ git+https://github.com/dbt-labs/dbt-common.git", "pytest", "pytest-dotenv", "pytest-xdist", @@ -114,37 +95,8 @@ check-sdist = [ ] protobuf = "protoc -I=./dbt/adapters/events --python_out=./dbt/adapters/events ./dbt/adapters/events/adapter_types.proto" -[tool.black] -extend-exclude = "dbt/adapters/events/adapter_types_pb2.py" -line-length = 99 -target-version = ['py38'] - -[tool.flake8] -select = ["E", "W", "F"] -ignore = ["E203", "E501", "E741", "W503", "W504"] -exclude = [ - "dbt/adapters/events/adapter_types_pb2.py", - "tests/functional", - "venv", -] -per-file-ignores = ["*/__init__.py: F401"] - [tool.mypy] -namespace_packages = true -show_error_codes = true -explicit_package_bases = true -ignore_missing_imports = true -pretty = true mypy_path = "third-party-stubs/" -files = [ - "dbt", - "tests/unit", -] -exclude = [ - "dbt/adapters/events/adapter_types_pb2.py", - "dbt-tests-adapter/dbt/__init__.py", # overlaps with `dbt/__init__.py` as expected for namespaces - "venv", -] [[tool.mypy.overrides]] module = ["dbt.adapters.events.adapter_types_pb2"] follow_imports = "skip"
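
Editor's note, not part of the patch: a minimal sketch of how a contributor might exercise the new tooling locally after this change, assuming `hatch` and `pre-commit` are already installed (CI installs `pre-commit` via Homebrew in `setup-hatch/action.yml`). The `dev` and `code-quality` entry points are the scripts added to `[tool.hatch.envs.default.scripts]` above.

```shell
# One-time setup: install the git hooks defined in .pre-commit-config.yaml
hatch run dev                # wraps `pre-commit install`

# Run the same checks as the CI `code-quality` job, against every file
hatch run code-quality       # wraps `pre-commit run --all-files`

# Or call pre-commit directly to check only the currently staged changes
pre-commit run
```

Running `hatch run code-quality` locally should mirror the single CI job that this patch introduces in place of the separate `lint` and `typecheck` hatch environments.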