From 5dd6e7dbb08a47d58a24a2bc87cb6b1c560e7968 Mon Sep 17 00:00:00 2001
From: Mike Alfare
Date: Sat, 7 Dec 2024 18:13:52 -0500
Subject: [PATCH 01/23] update to pyproject.toml

---
 .bumpversion.cfg                       | 33 ---------
 .github/workflows/main.yml             | 52 ++-------------
 .github/workflows/release-internal.yml | 26 ++------
 .github/workflows/release-prep.yml     | 78 ++++++----------------
 .github/workflows/version-bump.yml     | 28 --------
 .pre-commit-config.yaml                |  2 +
 MANIFEST.in                            |  1 -
 Makefile                               | 44 ------------
 dagger/requirements.txt                |  3 -
 dev-requirements.txt                   | 20 ------
 hatch.toml                             | 64 ++++++++++++++++++
 pyproject.toml                         | 59 +++++++++++++++++
 pytest.ini                             |  9 ---
 requirements.txt                       | 11 ---
 setup.py                               | 92 --------------------------
 tox.ini                                |  3 -
 16 files changed, 157 insertions(+), 368 deletions(-)
 delete mode 100644 .github/workflows/version-bump.yml
 delete mode 100644 MANIFEST.in
 delete mode 100644 Makefile
 delete mode 100644 dagger/requirements.txt
 delete mode 100644 dev-requirements.txt
 create mode 100644 hatch.toml
 create mode 100644 pyproject.toml
 delete mode 100644 pytest.ini
 delete mode 100644 requirements.txt
 delete mode 100644 setup.py
 delete mode 100644 tox.ini

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index f5eb7b79d..e69de29bb 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,33 +0,0 @@
-[bumpversion]
-current_version = 1.10.0a1
-parse = (?P<major>[\d]+) # major version number
-	\.(?P<minor>[\d]+) # minor version number
-	\.(?P<patch>[\d]+) # patch version number
-	(((?P<prekind>a|b|rc) # optional pre-release type
-	?(?P<num>[\d]+?)) # optional pre-release version number
-	\.?(?P<nightly>[a-z0-9]+\+[a-z]+)? # optional nightly release indicator
-	)? # expected matches: `1.5.0`, `1.5.0a1`, `1.5.0a1.dev123457+nightly`, expected failures: `1`, `1.5`, `1.5.2-a1`, `text1.5.0`
-serialize =
-	{major}.{minor}.{patch}{prekind}{num}.{nightly}
-	{major}.{minor}.{patch}{prekind}{num}
-	{major}.{minor}.{patch}
-commit = False
-tag = False
-
-[bumpversion:part:prekind]
-first_value = a
-optional_value = final
-values =
-	a
-	b
-	rc
-	final
-
-[bumpversion:part:num]
-first_value = 1
-
-[bumpversion:part:nightly]
-
-[bumpversion:file:setup.py]
-
-[bumpversion:file:dbt/adapters/spark/__version__.py]
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 287e5acb7..458048a7f 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -51,20 +51,8 @@ jobs:
         with:
           python-version: '3.9'

-      - name: Install python dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsasl2-dev
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install pre-commit
-          pre-commit --version
-          python -m pip install -r requirements.txt
-          python -m pip install -r dev-requirements.txt
-          python -c "import dbt.adapters.spark"
-
       - name: Run pre-commit hooks
-        run: pre-commit run --all-files --show-diff-on-failure
+        uses: pre-commit/action@v3.0.1

   unit:
     name: unit test / python ${{ matrix.python-version }}
@@ -87,29 +75,9 @@ jobs:
           python-version: ${{ matrix.python-version }}

       - name: Install python dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsasl2-dev
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install -r requirements.txt
-          python -m pip install -r dev-requirements.txt
-          python -m pip install -e .
-
-      - name: Run unit tests
-        run: python -m pytest --color=yes --csv unit_results.csv -v tests/unit
+        uses: pypa/hatch@install

-      - name: Get current date
-        if: always()
-        id: date
-        run: echo "date=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts
-
-      - uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
-          path: unit_results.csv
-          overwrite: true
+      - run: hatch run unit-tests

   build:
     name: build packages
@@ -128,24 +96,16 @@ jobs:
         with:
           python-version: '3.9'

-      - name: Install python dependencies
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip install --upgrade setuptools wheel twine check-wheel-contents
-          python -m pip --version
+      - uses: pypa/hatch@install

       - name: Build distributions
-        run: ./scripts/build-dist.sh
+        run: hatch build

       - name: Show distributions
         run: ls -lh dist/

       - name: Check distribution descriptions
-        run: |
-          twine check dist/*
-      - name: Check wheel contents
-        run: |
-          check-wheel-contents dist/*.whl --ignore W007,W008
+        run: hatch run build:check-all

       - name: Check if this is an alpha version
         id: check-is-alpha
diff --git a/.github/workflows/release-internal.yml b/.github/workflows/release-internal.yml
index 702ef9aea..c467d3fce 100644
--- a/.github/workflows/release-internal.yml
+++ b/.github/workflows/release-internal.yml
@@ -56,17 +56,9 @@ jobs:
           python-version: "${{ env.PYTHON_TARGET_VERSION }}"

       - name: Install python dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsasl2-dev
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install -r requirements.txt
-          python -m pip install -r dev-requirements.txt
-          python -m pip install -e .
+        uses: pypa/hatch@install

-      - name: Run unit tests
-        run: python -m pytest --color=yes --csv unit_results.csv -v tests/unit
+      - run: hatch run unit-tests

   run-integration-tests:
     name: "${{ matrix.test }}"
@@ -102,21 +94,11 @@ jobs:

     steps:
       - name: Check out the repository
-        if: github.event_name != 'pull_request_target'
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-
-      # explicitly checkout the branch for the PR,
-      # this is necessary for the `pull_request` event
-      - name: Check out the repository (PR)
-        if: github.event_name == 'pull_request_target'
         uses: actions/checkout@v4
         with:
           persist-credentials: false
-          ref: ${{ github.event.pull_request.head.ref }}

-      # the python version used here is not what is used in the tests themselves
+      # the python version used here is not what is used in the tests themselves
       - name: Set up Python for dagger
         uses: actions/setup-python@v5
         with:
@@ -140,6 +122,6 @@ jobs:
       package_test_command: "${{ inputs.package_test_command }}"
       dbms_name: "spark"
       ref: "${{ inputs.ref }}"
-      skip_tests: "${{ inputs.skip_tests }}"
+      skip_tests: "true"
     secrets: "inherit"

diff --git a/.github/workflows/release-prep.yml b/.github/workflows/release-prep.yml
index d5878ec1e..bffbb54e0 100644
--- a/.github/workflows/release-prep.yml
+++ b/.github/workflows/release-prep.yml
@@ -168,8 +168,8 @@ jobs:
         run: |
           if [[ ${{ steps.set_existence.outputs.exists }} != true ]]
           then
-            title="Spark version-bump.yml check"
-            message="dbt-spark needs version-bump.yml run before running the release. The changelog is not up to date."
+            title="Spark version bump check"
+            message="dbt-spark needs a version bump before running the release. The changelog is not up to date."
             echo "::error title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
             exit 1
           fi
@@ -186,7 +186,7 @@ jobs:
     runs-on: ubuntu-latest

     outputs:
-      up_to_date: ${{ steps.version-check.outputs.up_to_date }}
+      up_to_date: ${{ steps.version.outputs.is-current }}

     steps:
       - name: "Checkout ${{ github.repository }} Commit ${{ inputs.sha }}"
@@ -194,20 +194,24 @@ jobs:
         with:
           ref: ${{ inputs.sha }}

+      - uses: pypa/hatch@install
+
       - name: "Check Current Version In Code"
-        id: version-check
+        id: version
         run: |
           is_updated=false
-          if grep -Fxq "current_version = ${{ inputs.version_number }}" .bumpversion.cfg
+          is_current=false
+          current_version=$(hatch version)
+          if test "$current_version" = "${{ inputs.version }}"
           then
-            is_updated=true
+            is_current=true
           fi
-          echo "up_to_date=$is_updated" >> $GITHUB_OUTPUT
+          echo "is-current=$is_current" >> $GITHUB_OUTPUT

       - name: "[Notification] Check Current Version In Code"
         run: |
           title="Version check"
-          if [[ ${{ steps.version-check.outputs.up_to_date }} == true ]]
+          if [[ ${{ steps.version.outputs.is-current }} == true ]]
           then
             message="The version in the codebase is equal to the provided version"
           else
@@ -218,7 +222,7 @@ jobs:
       - name: "Spark safety check"
         if: ${{ contains(github.repository, 'dbt-labs/dbt-spark') }}
         run: |
-          if [[ ${{ steps.version-check.outputs.up_to_date }} != true ]]
+          if [[ ${{ steps.version.outputs.is-current }} != true ]]
           then
             title="Spark version-bump.yml check"
             message="dbt-spark needs version-bump.yml run before running the release.  The version bump is not up to date."
@@ -228,7 +232,7 @@ jobs:

       - name: "[DEBUG] Print Outputs"
         run: |
-          echo up_to_date: ${{ steps.version-check.outputs.up_to_date }}
+          echo up_to_date: ${{ steps.version.outputs.is-current }}

   skip-generate-changelog:
     runs-on: ubuntu-latest
@@ -383,26 +387,10 @@ jobs:
         with:
           python-version: ${{ env.PYTHON_TARGET_VERSION }}

-      - name: "Install Python Dependencies"
-        if: needs.audit-version-in-code.outputs.up_to_date == 'false'
-        run: |
-          python3 -m venv env
-          source env/bin/activate
-          python -m pip install --upgrade pip
+      - uses: pypa/hatch@install

       - name: "Bump Version To ${{ inputs.version_number }}"
-        if: needs.audit-version-in-code.outputs.up_to_date == 'false'
-        # note: bumpversion is no longer supported, it actually points to bump2version now
-        run: |
-          source env/bin/activate
-          if [ -f "editable-requirements.txt" ]
-          then
-            python -m pip install -r dev-requirements.txt -r editable-requirements.txt
-          else
-            python -m pip install -r dev-requirements.txt
-          fi
-          env/bin/bumpversion --allow-dirty --new-version ${{ inputs.version_number }} major
-          git status
+        run: hatch version ${{ inputs.version_number }}

       - name: "[Notification] Bump Version To ${{ inputs.version_number }}"
         if: needs.audit-version-in-code.outputs.up_to_date == 'false'
@@ -415,14 +403,14 @@ jobs:
       - name: "Remove Trailing Whitespace Via Pre-commit"
         continue-on-error: true
         run: |
-          pre-commit run trailing-whitespace --files .bumpversion.cfg CHANGELOG.md .changes/*
+          pre-commit run trailing-whitespace --files CHANGELOG.md .changes/*
           git status

       # this step will fail on newline errors but also correct them
       - name: "Removing Extra Newlines Via Pre-commit"
         continue-on-error: true
         run: |
-          pre-commit run end-of-file-fixer --files .bumpversion.cfg CHANGELOG.md .changes/*
+          pre-commit run end-of-file-fixer --files CHANGELOG.md .changes/*
           git status

       - name: "Commit & Push Changes"
@@ -459,18 +447,10 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}

-      - name: Install python dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsasl2-dev
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install -r requirements.txt
-          python -m pip install -r dev-requirements.txt
-          python -m pip install -e .
+      - uses: pypa/hatch@install

       - name: Run unit tests
-        run: python -m pytest --color=yes --csv unit_results.csv -v tests/unit
+        run: hatch run unit-tests

   run-integration-tests:
     name: ${{ matrix.test }}
@@ -505,34 +485,20 @@ jobs:

     steps:
       - name: Check out the repository
-        if: github.event_name != 'pull_request_target'
         uses: actions/checkout@v4
         with:
           persist-credentials: false

-      # explicitly checkout the branch for the PR,
-      # this is necessary for the `pull_request` event
-      - name: Check out the repository (PR)
-        if: github.event_name == 'pull_request_target'
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-          ref: ${{ github.event.pull_request.head.sha }}
-
       # the python version used here is not what is used in the tests themselves
       - name: Set up Python for dagger
         uses: actions/setup-python@v5
         with:
           python-version: "3.11"

-      - name: Install python dependencies
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install -r dagger/requirements.txt
+      - uses: pypa/hatch@install

       - name: Run tests for ${{ matrix.test }}
-        run: python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}
+        run: hatch run python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}

   merge-changes-into-target-branch:
     runs-on: ubuntu-latest
diff --git a/.github/workflows/version-bump.yml b/.github/workflows/version-bump.yml
deleted file mode 100644
index bde34d683..000000000
--- a/.github/workflows/version-bump.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-# **what?**
-# This workflow will take the new version number to bump to. With that
-# it will run versionbump to update the version number everywhere in the
-# code base and then run changie to create the corresponding changelog.
-# A PR will be created with the changes that can be reviewed before committing.
-
-# **why?**
-# This is to aid in releasing dbt and making sure we have updated
-# the version in all places and generated the changelog.
-
-# **when?**
-# This is triggered manually
-
-name: Version Bump
-
-on:
-  workflow_dispatch:
-    inputs:
-      version_number:
-        description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)'
-        required: true
-
-jobs:
-  version_bump_and_changie:
-    uses: dbt-labs/actions/.github/workflows/version-bump.yml@main
-    with:
-      version_number: ${{ inputs.version_number }}
-    secrets: inherit  # ok since what we are calling is internally maintained
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6697bbeb5..1a34810bf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -54,5 +54,7 @@ repos:
         - --pretty
         files: ^dbt/adapters
         additional_dependencies:
+        - types-PyYAML
+        - types-python-dateutil
         - types-pytz
         - types-requests
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index cfbc714ed..000000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1 +0,0 @@
-recursive-include dbt/include *.sql *.yml *.md
diff --git a/Makefile b/Makefile
deleted file mode 100644
index 46b9af294..000000000
--- a/Makefile
+++ /dev/null
@@ -1,44 +0,0 @@
-.DEFAULT_GOAL:=help
-
-.PHONY: dev
-dev: ## Installs adapter in develop mode along with development dependencies
-	@\
-	pip install -e . -r requirements.txt -r dev-requirements.txt -r dagger/requirements.txt && pre-commit install
-
-.PHONY: dev-uninstall
-dev-uninstall: ## Uninstalls all packages while maintaining the virtual environment
-	## Useful when updating versions, or if you accidentally installed into the system interpreter
-	pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y
-	pip uninstall -y dbt-spark
-
-.PHONY: lint
-lint: ## Runs flake8 and mypy code checks against staged changes.
-	@\
-	pre-commit run --all-files
-
-.PHONY: unit
-unit: ## Runs unit tests with py39.
-	@\
-	python -m pytest tests/unit
-
-.PHONY: test
-test: ## Runs unit tests with py39 and code checks against staged changes.
-	@\
-	python -m pytest tests/unit; \
-	python dagger/run_dbt_spark_tests.py --profile spark_session \
-	pre-commit run --all-files
-
-.PHONY: clean
-	@echo "cleaning repo"
-	@git clean -f -X
-
-.PHONY: help
-help: ## Show this help message.
-	@echo 'usage: make [target]'
-	@echo
-	@echo 'targets:'
-	@grep -E '^[7+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
-
-.PHONY: docker-prod
-docker-prod:
-	docker build -f docker/Dockerfile -t dbt-spark .
diff --git a/dagger/requirements.txt b/dagger/requirements.txt
deleted file mode 100644
index f150e3093..000000000
--- a/dagger/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-beartype<0.18.0
-dagger-io~=0.9.7
-python-dotenv
diff --git a/dev-requirements.txt b/dev-requirements.txt
deleted file mode 100644
index 3947695c7..000000000
--- a/dev-requirements.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-# install latest changes in dbt-core
-# TODO: how to automate switching from develop to version branches?
-git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core
-git+https://github.com/dbt-labs/dbt-common.git
-git+https://github.com/dbt-labs/dbt-adapters.git
-git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter
-
-# dev
-ipdb~=0.13.13
-pre-commit~=3.7.0
-pytest~=7.4
-pytest-csv~=3.0
-pytest-dotenv~=0.5.2
-pytest-logbook~=1.2
-pytest-xdist~=3.6
-
-# build
-bumpversion~=0.6.0
-twine~=4.0
-wheel~=0.43
diff --git a/hatch.toml b/hatch.toml
new file mode 100644
index 000000000..37ba0fce4
--- /dev/null
+++ b/hatch.toml
@@ -0,0 +1,64 @@
+[version]
+path = "dbt/adapters/redshift/__version__.py"
+
+[build.targets.sdist]
+packages = ["dbt"]
+
+[build.targets.wheel]
+packages = ["dbt"]
+
+[envs.default]
+dependencies = [
+    "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git",
+    "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git",
+    "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter",
+    "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core",
+    "beartype<0.18.0",
+    "dagger-io~=0.9.7",
+    "ddtrace==2.3.0",
+    "ipdb~=0.13.13",
+    "pre-commit==3.7.0",
+    "freezegun",
+    "pytest>=7.0,<8.0",
+    "pytest-csv~=3.0",
+    "pytest-dotenv",
+    "pytest-logbook~=1.2",
+    "pytest-mock",
+    "pytest-xdist",
+]
+
+[envs.default.scripts]
+setup = "pre-commit install"
+code-quality = "pre-commit run --all-files"
+unit-tests = "python -m pytest {args:tests/unit}"
+integration-tests = "python -m pytest {args:tests/functional}"
+docker-dev = [
+    "docker build -f docker/dev.Dockerfile -t dbt-redshift-dev .",
+	"docker run --rm -it --name dbt-redshift-dev -v $(shell pwd):/opt/code dbt-redshift-dev",
+]
+docker-prod = "docker build -f docker/Dockerfile -t dbt-spark ."
+
+[envs.build]
+detached = true
+dependencies = [
+    "wheel",
+    "twine",
+    "check-wheel-contents",
+]
+
+[envs.build.scripts]
+check-all = [
+    "- check-wheel",
+    "- check-sdist",
+]
+check-wheel = [
+    "twine check dist/*",
+    "find ./dist/dbt_redshift-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
+    "pip freeze | grep dbt-redshift",
+]
+check-sdist = [
+    "check-wheel-contents dist/*.whl --ignore W007,W008",
+    "find ./dist/dbt_redshift-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
+    "pip freeze | grep dbt-redshift",
+]
+docker-prod = "docker build -f docker/Dockerfile -t dbt-redshift ."
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 000000000..481cc9e3f
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,59 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+dynamic = ["version"]
+name = "dbt-spark"
+description = "The Apache Spark adapter plugin for dbt"
+readme = "README.md"
+keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs", "spark"]
+requires-python = ">=3.9.0"
+authors = [{ name = "dbt Labs", email = "info@dbtlabs.com" }]
+maintainers = [{ name = "dbt Labs", email = "info@dbtlabs.com" }]
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "License :: OSI Approved :: Apache Software License",
+    "Operating System :: MacOS :: MacOS X",
+    "Operating System :: Microsoft :: Windows",
+    "Operating System :: POSIX :: Linux",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+]
+dependencies = [
+    "dbt-common>=1.10,<2.0",
+    "dbt-adapters>=1.7,<2.0",
+    # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency
+    "dbt-core>=1.8.0",
+]
+[project.optional-dependencies]
+ODBC = ["pyodbc~=5.1.0"]
+PyHive = [
+    "PyHive[hive_pure_sasl]~=0.7.0",
+    "thrift>=0.11.0,<0.17.0",
+]
+session = ["pyspark>=3.0.0,<4.0.0"]
+all = [
+    "pyodbc~=5.1.0",
+    "PyHive[hive_pure_sasl]~=0.7.0",
+    "thrift>=0.11.0,<0.17.0",
+    "pyspark>=3.0.0,<4.0.0",
+]
+
+[project.urls]
+Homepage = "https://github.com/dbt-labs/dbt-spark"
+Documentation = "https://docs.getdbt.com"
+Repository = "https://github.com/dbt-labs/dbt-spark.git"
+Issues = "https://github.com/dbt-labs/dbt-spark/issues"
+Changelog = "https://github.com/dbt-labs/dbt-spark/blob/main/CHANGELOG.md"
+
+[tool.pytest]
+testpaths = ["tests/functional", "tests/unit"]
+addopts = "-v -n auto"
+color = true
+filterwarnings = [
+    "ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning",
+    "ignore:unclosed file .*:ResourceWarning",
+]
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index b3d74bc14..000000000
--- a/pytest.ini
+++ /dev/null
@@ -1,9 +0,0 @@
-[pytest]
-filterwarnings =
-    ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning
-    ignore:unclosed file .*:ResourceWarning
-env_files =
-    test.env
-testpaths =
-    tests/unit
-    tests/functional
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 412630919..000000000
--- a/requirements.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-pyhive[hive_pure_sasl]~=0.7.0
-requests>=2.28.1
-
-pyodbc~=5.1.0 --no-binary pyodbc
-sqlparams>=3.0.0
-thrift>=0.13.0
-pyspark>=3.0.0,<4.0.0
-sqlparse>=0.4.2 # not directly required, pinned by Snyk to avoid a vulnerability
-
-types-PyYAML
-types-python-dateutil
diff --git a/setup.py b/setup.py
deleted file mode 100644
index aa3be6616..000000000
--- a/setup.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python
-import os
-import sys
-import re
-
-# require python 3.8 or newer
-if sys.version_info < (3, 9):
-    print("Error: dbt does not support this version of Python.")
-    print("Please upgrade to Python 3.9 or higher.")
-    sys.exit(1)
-
-# require version of setuptools that supports find_namespace_packages
-from setuptools import setup
-
-try:
-    from setuptools import find_namespace_packages
-except ImportError:
-    # the user has a downlevel version of setuptools.
-    print("Error: dbt requires setuptools v40.1.0 or higher.")
-    print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again")
-    sys.exit(1)
-
-# pull long description from README
-this_directory = os.path.abspath(os.path.dirname(__file__))
-with open(os.path.join(this_directory, "README.md"), "r", encoding="utf8") as f:
-    long_description = f.read()
-
-
-# get this package's version from dbt/adapters//__version__.py
-def _get_plugin_version_dict():
-    _version_path = os.path.join(this_directory, "dbt", "adapters", "spark", "__version__.py")
-    _semver = r"""(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"""
-    _pre = r"""((?P<prekind>a|b|rc)(?P<pre>\d+))?"""
-    _build = r"""(\+build[0-9]+)?"""
-    _version_pattern = rf"""version\s*=\s*["']{_semver}{_pre}{_build}["']"""
-    with open(_version_path) as f:
-        match = re.search(_version_pattern, f.read().strip())
-        if match is None:
-            raise ValueError(f"invalid version at {_version_path}")
-        return match.groupdict()
-
-
-package_name = "dbt-spark"
-package_version = "1.10.0a1"
-description = """The Apache Spark adapter plugin for dbt"""
-
-odbc_extras = ["pyodbc~=5.1.0"]
-pyhive_extras = [
-    "PyHive[hive_pure_sasl]~=0.7.0",
-    "thrift>=0.11.0,<0.17.0",
-]
-session_extras = ["pyspark>=3.0.0,<4.0.0"]
-all_extras = odbc_extras + pyhive_extras + session_extras
-
-setup(
-    name=package_name,
-    version=package_version,
-    description=description,
-    long_description=long_description,
-    long_description_content_type="text/markdown",
-    author="dbt Labs",
-    author_email="info@dbtlabs.com",
-    url="https://github.com/dbt-labs/dbt-spark",
-    packages=find_namespace_packages(include=["dbt", "dbt.*"]),
-    include_package_data=True,
-    install_requires=[
-        "sqlparams>=3.0.0",
-        "dbt-common>=1.10,<2.0",
-        "dbt-adapters>=1.7,<2.0",
-        # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency
-        "dbt-core>=1.8.0",
-    ],
-    extras_require={
-        "ODBC": odbc_extras,
-        "PyHive": pyhive_extras,
-        "session": session_extras,
-        "all": all_extras,
-    },
-    zip_safe=False,
-    classifiers=[
-        "Development Status :: 5 - Production/Stable",
-        "License :: OSI Approved :: Apache Software License",
-        "Operating System :: Microsoft :: Windows",
-        "Operating System :: MacOS :: MacOS X",
-        "Operating System :: POSIX :: Linux",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-    ],
-    python_requires=">=3.9",
-)
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 62bb9c5b0..000000000
--- a/tox.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[tox]
-skipsdist = True
-envlist = unit, flake8, integration-spark-thrift

From bcaeb159ba46f6b718235a42a2cbfadaa96f1b20 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Sat, 7 Dec 2024 18:18:38 -0500
Subject: [PATCH 02/23] changelog

---
 .changes/unreleased/Under the Hood-20241207-181814.yaml | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100644 .changes/unreleased/Under the Hood-20241207-181814.yaml

diff --git a/.changes/unreleased/Under the Hood-20241207-181814.yaml b/.changes/unreleased/Under the Hood-20241207-181814.yaml
new file mode 100644
index 000000000..c76974d43
--- /dev/null
+++ b/.changes/unreleased/Under the Hood-20241207-181814.yaml	
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Move from setup.py to pyproject.toml and to hatch as a dev tool
+time: 2024-12-07T18:18:14.85697-05:00
+custom:
+  Author: mikealfare
+  Issue: "1150"

From fb1b75d3707a498547b301ab3f94e32b7ee37bc9 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Sat, 7 Dec 2024 18:32:31 -0500
Subject: [PATCH 03/23] reduce pr noise

---
 .github/workflows/release-prep.yml | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/release-prep.yml b/.github/workflows/release-prep.yml
index bffbb54e0..ce07447da 100644
--- a/.github/workflows/release-prep.yml
+++ b/.github/workflows/release-prep.yml
@@ -186,7 +186,7 @@ jobs:
     runs-on: ubuntu-latest
 
     outputs:
-      up_to_date: ${{ steps.version.outputs.is-current }}
+      up_to_date: ${{ steps.version-check.outputs.up_to_date }}
 
     steps:
       - name: "Checkout ${{ github.repository }} Commit ${{ inputs.sha }}"
@@ -197,21 +197,20 @@ jobs:
       - uses: pypa/hatch@install
 
       - name: "Check Current Version In Code"
-        id: version
+        id: version-check
         run: |
           is_updated=false
-          is_current=false
           current_version=$(hatch version)
           if test "$current_version" = "${{ inputs.version }}"
           then
-            is_current=true
+            is_updated=true
           fi
-          echo "is-current=$is_current" >> $GITHUB_OUTPUT
+          echo "up_to_date=$is_updated" >> $GITHUB_OUTPUT
 
       - name: "[Notification] Check Current Version In Code"
         run: |
           title="Version check"
-          if [[ ${{ steps.version.outputs.is-current }} == true ]]
+          if [[ ${{ steps.version-check.outputs.up_to_date }} == true ]]
           then
             message="The version in the codebase is equal to the provided version"
           else
@@ -222,7 +221,7 @@ jobs:
       - name: "Spark safety check"
         if: ${{ contains(github.repository, 'dbt-labs/dbt-spark') }}
         run: |
-          if [[ ${{ steps.version.outputs.is-current }} != true ]]
+          if [[ ${{ steps.version-check.outputs.up_to_date }} != true ]]
           then
             title="Spark version-bump.yml check"
             message="dbt-spark needs version-bump.yml run before running the release.  The version bump is not up to date."
@@ -232,7 +231,7 @@ jobs:
 
       - name: "[DEBUG] Print Outputs"
         run: |
-          echo up_to_date: ${{ steps.version.outputs.is-current }}
+          echo up_to_date: ${{ steps.version-check.outputs.up_to_date }}
 
   skip-generate-changelog:
     runs-on: ubuntu-latest

From 5857920a34d8d893e80d1958216e9a53d66575c6 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Mon, 9 Dec 2024 13:10:36 -0500
Subject: [PATCH 04/23] replace redshift references with spark

---
 hatch.toml | 16 ++++++----------
 1 file changed, 6 insertions(+), 10 deletions(-)

diff --git a/hatch.toml b/hatch.toml
index 37ba0fce4..d316465ac 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -1,5 +1,5 @@
 [version]
-path = "dbt/adapters/redshift/__version__.py"
+path = "dbt/adapters/spark/__version__.py"
 
 [build.targets.sdist]
 packages = ["dbt"]
@@ -32,10 +32,6 @@ setup = "pre-commit install"
 code-quality = "pre-commit run --all-files"
 unit-tests = "python -m pytest {args:tests/unit}"
 integration-tests = "python -m pytest {args:tests/functional}"
-docker-dev = [
-    "docker build -f docker/dev.Dockerfile -t dbt-redshift-dev .",
-	"docker run --rm -it --name dbt-redshift-dev -v $(shell pwd):/opt/code dbt-redshift-dev",
-]
 docker-prod = "docker build -f docker/Dockerfile -t dbt-spark ."
 
 [envs.build]
@@ -53,12 +49,12 @@ check-all = [
 ]
 check-wheel = [
     "twine check dist/*",
-    "find ./dist/dbt_redshift-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
-    "pip freeze | grep dbt-redshift",
+    "find ./dist/dbt_spark-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
+    "pip freeze | grep dbt-spark",
 ]
 check-sdist = [
     "check-wheel-contents dist/*.whl --ignore W007,W008",
-    "find ./dist/dbt_redshift-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
-    "pip freeze | grep dbt-redshift",
+    "find ./dist/dbt_spark-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
+    "pip freeze | grep dbt-spark",
 ]
-docker-prod = "docker build -f docker/Dockerfile -t dbt-redshift ."
+docker-prod = "docker build -f docker/Dockerfile -t dbt-spark ."

From a153d20ad3e03aa99c136765bc9251225166e471 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Mon, 9 Dec 2024 13:15:53 -0500
Subject: [PATCH 05/23] add back sqlparams dep that was accidentally dropped
 during migration from setup.py to pyproject.toml

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index 481cc9e3f..02d9a462a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,6 +27,7 @@ dependencies = [
     "dbt-adapters>=1.7,<2.0",
     # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency
     "dbt-core>=1.8.0",
+    "sqlparams>=3.0.0",
 ]
 [project.optional-dependencies]
 ODBC = ["pyodbc~=5.1.0"]

From 01d378b327f26f551ebf29dc223fd69da0620995 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Mon, 9 Dec 2024 13:21:03 -0500
Subject: [PATCH 06/23] install all optional dependencies in the default
 environment for testing

---
 hatch.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/hatch.toml b/hatch.toml
index d316465ac..a255a6d0a 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -26,6 +26,7 @@ dependencies = [
     "pytest-mock",
     "pytest-xdist",
 ]
+features=["all"]
 
 [envs.default.scripts]
 setup = "pre-commit install"

From efcab9d45d8ca5dbe47f78c5c95274029d7cecfb Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Mon, 9 Dec 2024 13:34:35 -0500
Subject: [PATCH 07/23] update dependency updater for testing feature branches

---
 .github/scripts/update_dependencies.sh            | 6 +++---
 .github/scripts/update_dev_dependency_branches.sh | 4 ++--
 .github/workflows/integration.yml                 | 5 ++---
 3 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/.github/scripts/update_dependencies.sh b/.github/scripts/update_dependencies.sh
index c3df48e52..fabdadff2 100644
--- a/.github/scripts/update_dependencies.sh
+++ b/.github/scripts/update_dependencies.sh
@@ -2,9 +2,9 @@
 set -e
 
 git_branch=$1
-target_req_file="dev-requirements.txt"
-core_req_sed_pattern="s|dbt-core.git.*#egg=dbt-core|dbt-core.git@${git_branch}#egg=dbt-core|g"
-tests_req_sed_pattern="s|dbt-core.git.*#egg=dbt-tests|dbt-core.git@${git_branch}#egg=dbt-tests|g"
+target_req_file="hatch.toml"
+core_req_sed_pattern="s|dbt-core.git.*#subdirectory=core|dbt-core.git@${git_branch}#subdirectory=core|g"
+tests_req_sed_pattern="s|dbt-adapters.git.*#subdirectory=dbt-tests-adapter|dbt-adapters.git@${git_branch}#subdirectory=dbt-tests-adapter|g"
 if [[ "$OSTYPE" == darwin* ]]; then
  # mac ships with a different version of sed that requires a delimiter arg
  sed -i "" "$core_req_sed_pattern" $target_req_file
diff --git a/.github/scripts/update_dev_dependency_branches.sh b/.github/scripts/update_dev_dependency_branches.sh
index 022df6a8a..9385cf885 100755
--- a/.github/scripts/update_dev_dependency_branches.sh
+++ b/.github/scripts/update_dev_dependency_branches.sh
@@ -5,8 +5,8 @@ set -e
 dbt_adapters_branch=$1
 dbt_core_branch=$2
 dbt_common_branch=$3
-target_req_file="dev-requirements.txt"
-core_req_sed_pattern="s|dbt-core.git.*#egg=dbt-core|dbt-core.git@${dbt_core_branch}#egg=dbt-core|g"
+target_req_file="hatch.toml"
+core_req_sed_pattern="s|dbt-core.git.*#subdirectory=core|dbt-core.git@${dbt_core_branch}#subdirectory=core|g"
 adapters_req_sed_pattern="s|dbt-adapters.git|dbt-adapters.git@${dbt_adapters_branch}|g"
 common_req_sed_pattern="s|dbt-common.git|dbt-common.git@${dbt_common_branch}|g"
 if [[ "$OSTYPE" == darwin* ]]; then
diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 35bd9cae0..c79147e8f 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -133,12 +133,11 @@ jobs:
             ${{ inputs.dbt_adapters_branch }} \
             ${{ inputs.dbt_core_branch }} \
             ${{ inputs.dbt_common_branch }}
-          cat dev-requirements.txt
+          cat hatch.toml
 
       - name: Install python dependencies
+        uses: pypa/hatch@install
         run: |
-          python -m pip install --user --upgrade pip
-          python -m pip --version
           python -m pip install -r dagger/requirements.txt
 
       - name: Run tests for ${{ matrix.test }}

From 988a88c624bceebe82c313d07cc168ab4be96b1c Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Mon, 9 Dec 2024 13:40:57 -0500
Subject: [PATCH 08/23] update dependency updater for testing feature branches

---
 .github/workflows/integration.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index c79147e8f..c771e52f8 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -137,8 +137,6 @@ jobs:
 
       - name: Install python dependencies
         uses: pypa/hatch@install
-        run: |
-          python -m pip install -r dagger/requirements.txt
 
       - name: Run tests for ${{ matrix.test }}
         run: python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}

From 5dd699278479fd4cf45bc44db074f1a8d0c21a2d Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Mon, 9 Dec 2024 14:50:14 -0500
Subject: [PATCH 09/23] update dagger script for hatch

---
 dagger/run_dbt_spark_tests.py | 30 +++---------------------------
 hatch.toml                    |  1 +
 2 files changed, 4 insertions(+), 27 deletions(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index 6c310a6f8..611f48692 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -94,11 +94,9 @@ async def test_spark(test_args):
         pip_cache = client.cache_volume("pip")
 
         # setup directories as we don't want to copy the whole repo into the container
-        req_files = client.host().directory(
-            "./", include=["*.txt", "*.env", "*.ini", "*.md", "setup.py"]
+        client.host().directory(
+            "./", include=["*.env", "hatch.toml", "pyproject.toml", "./dbt", "./tests"]
         )
-        dbt_spark_dir = client.host().directory("./dbt")
-        test_dir = client.host().directory("./tests")
         scripts = client.host().directory("./dagger/scripts")
 
         platform = dagger.Platform("linux/amd64")
@@ -110,27 +108,6 @@ async def test_spark(test_args):
             # install OS deps first so any local changes don't invalidate the cache
             .with_directory("/scripts", scripts)
             .with_exec(["./scripts/install_os_reqs.sh"])
-            # install dbt-spark + python deps
-            .with_directory("/src", req_files)
-            .with_exec(["pip", "install", "-U", "pip"])
-            .with_workdir("/src")
-            .with_exec(["pip", "install", "-r", "requirements.txt"])
-            .with_exec(["pip", "install", "-r", "dev-requirements.txt"])
-        )
-
-        # install local dbt-spark changes
-        tst_container = (
-            tst_container.with_workdir("/")
-            .with_directory("src/dbt", dbt_spark_dir)
-            .with_workdir("/src")
-            .with_exec(["pip", "install", "-e", "."])
-        )
-
-        # install local test changes
-        tst_container = (
-            tst_container.with_workdir("/")
-            .with_directory("src/tests", test_dir)
-            .with_workdir("/src")
         )
 
         if test_profile == "apache_spark":
@@ -145,13 +122,12 @@ async def test_spark(test_args):
             )
 
         elif test_profile == "spark_session":
-            tst_container = tst_container.with_exec(["pip", "install", "pyspark"])
             tst_container = tst_container.with_exec(["apt-get", "install", "openjdk-17-jre", "-y"])
 
         tst_container = tst_container.with_(env_variables(TESTING_ENV_VARS))
         test_path = test_args.test_path if test_args.test_path else "tests/functional/adapter"
         result = await tst_container.with_exec(
-            ["pytest", "-v", "--profile", test_profile, "-n", "auto", test_path]
+            ["hatch", "run", "pytest", "-v", "--profile", test_profile, "-n", "auto", test_path]
         ).stdout()
 
         return result
diff --git a/hatch.toml b/hatch.toml
index a255a6d0a..8fec666b8 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -13,6 +13,7 @@ dependencies = [
     "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git",
     "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter",
     "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core",
+    "anyio",
     "beartype<0.18.0",
     "dagger-io~=0.9.7",
     "ddtrace==2.3.0",

From 8c5f3240c6cdce5ba1c25a289132383265fda9cc Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Mon, 9 Dec 2024 14:51:44 -0500
Subject: [PATCH 10/23] update dagger script for hatch

---
 .github/workflows/integration.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index c771e52f8..1160a5109 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -139,4 +139,4 @@ jobs:
         uses: pypa/hatch@install
 
       - name: Run tests for ${{ matrix.test }}
-        run: python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}
+        run: hatch run python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}

From 6943cc2d196e4205dbbb050f36e0ee03f14d37ba Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Mon, 9 Dec 2024 15:33:00 -0500
Subject: [PATCH 11/23] add explicit req files back

---
 dagger/run_dbt_spark_tests.py | 5 ++++-
 hatch.toml                    | 1 -
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index 611f48692..4b8f2f4de 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -94,7 +94,7 @@ async def test_spark(test_args):
         pip_cache = client.cache_volume("pip")
 
         # setup directories as we don't want to copy the whole repo into the container
-        client.host().directory(
+        req_files = client.host().directory(
             "./", include=["*.env", "hatch.toml", "pyproject.toml", "./dbt", "./tests"]
         )
         scripts = client.host().directory("./dagger/scripts")
@@ -108,6 +108,9 @@ async def test_spark(test_args):
             # install OS deps first so any local changes don't invalidate the cache
             .with_directory("/scripts", scripts)
             .with_exec(["./scripts/install_os_reqs.sh"])
+            # install dbt-spark + python deps
+            .with_directory("/src", req_files)
+            .with_workdir("/src")
         )
 
         if test_profile == "apache_spark":
diff --git a/hatch.toml b/hatch.toml
index 8fec666b8..a255a6d0a 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -13,7 +13,6 @@ dependencies = [
     "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git",
     "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter",
     "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core",
-    "anyio",
     "beartype<0.18.0",
     "dagger-io~=0.9.7",
     "ddtrace==2.3.0",

From 40861596618d3d35db361f35f8c3e570cb2839ed Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Tue, 10 Dec 2024 17:51:20 -0500
Subject: [PATCH 12/23] fix pytest config in pyproject.toml

---
 pyproject.toml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 02d9a462a..4079e0af5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -50,8 +50,9 @@ Repository = "https://github.com/dbt-labs/dbt-spark.git"
 Issues = "https://github.com/dbt-labs/dbt-spark/issues"
 Changelog = "https://github.com/dbt-labs/dbt-spark/blob/main/CHANGELOG.md"
 
-[tool.pytest]
+[tool.pytest.ini_options]
 testpaths = ["tests/functional", "tests/unit"]
+env_files = ["test.env"]
 addopts = "-v -n auto"
 color = true
 filterwarnings = [

From b561e08fb3216a180b64515c8b1aa7a4e8828005 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Tue, 10 Dec 2024 17:51:39 -0500
Subject: [PATCH 13/23] update hatch integration tests command

---
 .github/workflows/integration.yml | 2 +-
 hatch.toml                        | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 1160a5109..ba59c9401 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -139,4 +139,4 @@ jobs:
         uses: pypa/hatch@install
 
       - name: Run tests for ${{ matrix.test }}
-        run: hatch run python dagger/run_dbt_spark_tests.py --profile ${{ matrix.test }}
+        run: hatch run integration-tests --profile ${{ matrix.test }}
diff --git a/hatch.toml b/hatch.toml
index a255a6d0a..08cfc18b5 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -32,7 +32,7 @@ features=["all"]
 setup = "pre-commit install"
 code-quality = "pre-commit run --all-files"
 unit-tests = "python -m pytest {args:tests/unit}"
-integration-tests = "python -m pytest {args:tests/functional}"
+integration-tests = "python dagger/run_dbt_spark_tests.py {args:--profile apache_spark}"
 docker-prod = "docker build -f docker/Dockerfile -t dbt-spark ."
 
 [envs.build]

From f7f6863eb627a6b6eae631d22032d84ec761267f Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Mon, 16 Dec 2024 20:17:19 -0500
Subject: [PATCH 14/23] remove deleted file from rebasing

---
 .bumpversion.cfg | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 .bumpversion.cfg

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
deleted file mode 100644
index e69de29bb..000000000

From d51e52c765c45369a914d2323b51ee9373a677d6 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Tue, 17 Dec 2024 20:13:29 -0500
Subject: [PATCH 15/23] revert some changes to minimize the PR diffs

---
 dagger/run_dbt_spark_tests.py | 23 +++++++++++++++++++++--
 1 file changed, 21 insertions(+), 2 deletions(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index 4b8f2f4de..a910d706c 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -95,8 +95,10 @@ async def test_spark(test_args):
 
         # setup directories as we don't want to copy the whole repo into the container
         req_files = client.host().directory(
-            "./", include=["*.env", "hatch.toml", "pyproject.toml", "./dbt", "./tests"]
+            "./", include=["test.env", "hatch.toml", "pyproject.toml"]
         )
+        dbt_spark_dir = client.host().directory("./dbt")
+        test_dir = client.host().directory("./tests")
         scripts = client.host().directory("./dagger/scripts")
 
         platform = dagger.Platform("linux/amd64")
@@ -110,6 +112,22 @@ async def test_spark(test_args):
             .with_exec(["./scripts/install_os_reqs.sh"])
             # install dbt-spark + python deps
             .with_directory("/src", req_files)
+            .with_exec(["pip", "install", "-U", "pip", "hatch"])
+        )
+
+        # install local dbt-spark changes
+        tst_container = (
+            tst_container.with_workdir("/")
+            .with_directory("src/dbt", dbt_spark_dir)
+            .with_workdir("/src")
+            .with_exec(["hatch", "shell"])
+        )
+
+        # install local test changes
+        tst_container = (
+            tst_container.with_workdir("/")
+            .with_directory("src/tests", test_dir)
+            .with_workdir("/src")
             .with_workdir("/src")
         )
 
@@ -125,12 +143,13 @@ async def test_spark(test_args):
             )
 
         elif test_profile == "spark_session":
+            tst_container = tst_container.with_exec(["pip", "install", "pyspark"])
             tst_container = tst_container.with_exec(["apt-get", "install", "openjdk-17-jre", "-y"])
 
         tst_container = tst_container.with_(env_variables(TESTING_ENV_VARS))
         test_path = test_args.test_path if test_args.test_path else "tests/functional/adapter"
         result = await tst_container.with_exec(
-            ["hatch", "run", "pytest", "-v", "--profile", test_profile, "-n", "auto", test_path]
+            ["hatch", "run", "pytest", "--profile", test_profile, test_path]
         ).stdout()
 
         return result

From 192eb645260c66449695863f44bd37e209905907 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Tue, 17 Dec 2024 20:15:19 -0500
Subject: [PATCH 16/23] revert some changes to minimize the PR diffs

---
 dagger/run_dbt_spark_tests.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index a910d706c..828c39381 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -128,7 +128,6 @@ async def test_spark(test_args):
             tst_container.with_workdir("/")
             .with_directory("src/tests", test_dir)
             .with_workdir("/src")
-            .with_workdir("/src")
         )
 
         if test_profile == "apache_spark":

From 84c6dc2903b70277bc72fc4c16ee9611d0c3d08e Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Tue, 17 Dec 2024 20:21:14 -0500
Subject: [PATCH 17/23] include readme and license for building

---
 dagger/run_dbt_spark_tests.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index 828c39381..68dc12105 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -95,7 +95,7 @@ async def test_spark(test_args):
 
         # setup directories as we don't want to copy the whole repo into the container
         req_files = client.host().directory(
-            "./", include=["test.env", "hatch.toml", "pyproject.toml"]
+            "./", include=["test.env", "hatch.toml", "pyproject.toml", "README.md", "License.md"]
         )
         dbt_spark_dir = client.host().directory("./dbt")
         test_dir = client.host().directory("./tests")

From f9472fb27c3be94bccac131d2623c317d2eacd3f Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Tue, 17 Dec 2024 20:52:16 -0500
Subject: [PATCH 18/23] keep the dagger reqs in their own file; dagger requires
 py310 and we test on py39

---
 .github/workflows/integration.yml | 8 +++++++-
 dagger/requirements.txt           | 3 +++
 hatch.toml                        | 2 --
 3 files changed, 10 insertions(+), 3 deletions(-)
 create mode 100644 dagger/requirements.txt

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index ba59c9401..6060d1edc 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -135,8 +135,14 @@ jobs:
             ${{ inputs.dbt_common_branch }}
           cat hatch.toml
 
-      - name: Install python dependencies
+      - name: Install hatch
         uses: pypa/hatch@install
 
+      - name: Install python dependencies
+        run: |
+          python -m pip install --user --upgrade pip
+          python -m pip --version
+          python -m pip install -r dagger/requirements.txt
+
       - name: Run tests for ${{ matrix.test }}
         run: hatch run integration-tests --profile ${{ matrix.test }}
diff --git a/dagger/requirements.txt b/dagger/requirements.txt
new file mode 100644
index 000000000..f150e3093
--- /dev/null
+++ b/dagger/requirements.txt
@@ -0,0 +1,3 @@
+beartype<0.18.0
+dagger-io~=0.9.7
+python-dotenv
diff --git a/hatch.toml b/hatch.toml
index 08cfc18b5..55a51c7cb 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -13,8 +13,6 @@ dependencies = [
     "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git",
     "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter",
     "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core",
-    "beartype<0.18.0",
-    "dagger-io~=0.9.7",
     "ddtrace==2.3.0",
     "ipdb~=0.13.13",
     "pre-commit==3.7.0",

From b6acf21da6c8c2d3a6c43016a88bdf952bff6b4d Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Tue, 17 Dec 2024 20:59:32 -0500
Subject: [PATCH 19/23] install dagger reqs in the hatch environment

---
 .github/workflows/integration.yml | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
index 6060d1edc..870ac13b3 100644
--- a/.github/workflows/integration.yml
+++ b/.github/workflows/integration.yml
@@ -139,10 +139,7 @@ jobs:
         uses: pypa/hatch@install
 
       - name: Install python dependencies
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install -r dagger/requirements.txt
+        run: hatch run pip install -r dagger/requirements.txt
 
       - name: Run tests for ${{ matrix.test }}
         run: hatch run integration-tests --profile ${{ matrix.test }}

From dc41792a3f93179e10611017dd0b78c8faace365 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Thu, 19 Dec 2024 15:29:23 -0500
Subject: [PATCH 20/23] update spark container to be more clear about what is
 happening when

---
 dagger/run_dbt_spark_tests.py | 70 +++++++++++++++--------------------
 1 file changed, 29 insertions(+), 41 deletions(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index 68dc12105..3c3fb935d 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -87,68 +87,56 @@ def get_spark_container(client: dagger.Client) -> (dagger.Service, str):
 
 async def test_spark(test_args):
     async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
-        test_profile = test_args.profile
 
         # create cache volumes, these are persisted between runs saving time when developing locally
-        os_reqs_cache = client.cache_volume("os_reqs")
-        pip_cache = client.cache_volume("pip")
-
-        # setup directories as we don't want to copy the whole repo into the container
-        req_files = client.host().directory(
-            "./", include=["test.env", "hatch.toml", "pyproject.toml", "README.md", "License.md"]
-        )
-        dbt_spark_dir = client.host().directory("./dbt")
-        test_dir = client.host().directory("./tests")
-        scripts = client.host().directory("./dagger/scripts")
-
-        platform = dagger.Platform("linux/amd64")
         tst_container = (
-            client.container(platform=platform)
+            client.container(platform=dagger.Platform("linux/amd64"))
             .from_("python:3.9-slim")
-            .with_mounted_cache("/var/cache/apt/archives", os_reqs_cache)
-            .with_mounted_cache("/root/.cache/pip", pip_cache)
-            # install OS deps first so any local changes don't invalidate the cache
-            .with_directory("/scripts", scripts)
-            .with_exec(["./scripts/install_os_reqs.sh"])
-            # install dbt-spark + python deps
-            .with_directory("/src", req_files)
-            .with_exec(["pip", "install", "-U", "pip", "hatch"])
+            .with_mounted_cache("/var/cache/apt/archives", client.cache_volume("os_reqs"))
+            .with_mounted_cache("/root/.cache/pip", client.cache_volume("pip"))
         )
 
-        # install local dbt-spark changes
+        # install system dependencies first so any local changes don't invalidate the cache
         tst_container = (
             tst_container.with_workdir("/")
-            .with_directory("src/dbt", dbt_spark_dir)
-            .with_workdir("/src")
-            .with_exec(["hatch", "shell"])
+            .with_directory("/scripts", client.host().directory("./dagger/scripts"))
+            .with_exec(["./scripts/install_os_reqs.sh"])
+            .with_exec(["pip", "install", "-U", "pip", "hatch"])
+            .with_(env_variables(TESTING_ENV_VARS))
         )
 
-        # install local test changes
+        # copy project files into image
         tst_container = (
             tst_container.with_workdir("/")
-            .with_directory("src/tests", test_dir)
-            .with_workdir("/src")
+            .with_directory("/src/dbt", client.host().directory("./dbt"))
+            .with_directory("/src/tests", client.host().directory("./tests"))
+            .with_file("/src/hatch.toml", client.host().file("./hatch.toml"))
+            .with_file("/src/License.md", client.host().file("./License.md"))
+            .with_file("/src/pyproject.toml", client.host().file("./pyproject.toml"))
+            .with_file("/src/README.md", client.host().file("./README.md"))
+            .with_file("/src/test.env", client.host().file("./test.env"))
         )
 
-        if test_profile == "apache_spark":
+        # install profile-specific system dependencies last since tests usually rotate through profiles
+        if test_args.profile == "apache_spark":
             spark_ctr, spark_host = get_spark_container(client)
             tst_container = tst_container.with_service_binding(alias=spark_host, service=spark_ctr)
 
-        elif test_profile in ["databricks_cluster", "databricks_sql_endpoint", "spark_http_odbc"]:
-            tst_container = (
-                tst_container.with_workdir("/")
-                .with_exec(["./scripts/configure_odbc.sh"])
-                .with_workdir("/src")
+        elif test_args.profile in [
+            "databricks_cluster",
+            "databricks_sql_endpoint",
+            "spark_http_odbc",
+        ]:
+            tst_container = tst_container.with_workdir("/").with_exec(
+                ["./scripts/configure_odbc.sh"]
             )
 
-        elif test_profile == "spark_session":
-            tst_container = tst_container.with_exec(["pip", "install", "pyspark"])
+        elif test_args.profile == "spark_session":
             tst_container = tst_container.with_exec(["apt-get", "install", "openjdk-17-jre", "-y"])
 
-        tst_container = tst_container.with_(env_variables(TESTING_ENV_VARS))
-        test_path = test_args.test_path if test_args.test_path else "tests/functional/adapter"
+        # run the tests
         result = await tst_container.with_exec(
-            ["hatch", "run", "pytest", "--profile", test_profile, test_path]
+            ["hatch", "run", "pytest", "--profile", test_args.profile, test_args.test_path]
         ).stdout()
 
         return result
@@ -156,7 +144,7 @@ async def test_spark(test_args):
 
 parser = argparse.ArgumentParser()
 parser.add_argument("--profile", required=True, type=str)
-parser.add_argument("--test-path", required=False, type=str)
+parser.add_argument("--test-path", required=False, type=str, default="tests/functional/adapter")
 args = parser.parse_args()
 
 anyio.run(test_spark, args)

From 7a93bfc484b6207a86d97abaf8faea12f7f8fd7d Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Thu, 19 Dec 2024 15:35:30 -0500
Subject: [PATCH 21/23] allow for test.env to not exist, needed for ci

---
 dagger/run_dbt_spark_tests.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index 3c3fb935d..5249a640b 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -114,9 +114,16 @@ async def test_spark(test_args):
             .with_file("/src/License.md", client.host().file("./License.md"))
             .with_file("/src/pyproject.toml", client.host().file("./pyproject.toml"))
             .with_file("/src/README.md", client.host().file("./README.md"))
-            .with_file("/src/test.env", client.host().file("./test.env"))
         )
 
+        # try to copy over the .env file for local testing
+        try:
+            tst_container = tst_container.with_workdir("/").with_file(
+                "/src/test.env", client.host().file("./test.env")
+            )
+        except dagger.QueryError:
+            pass
+
         # install profile-specific system dependencies last since tests usually rotate through profiles
         if test_args.profile == "apache_spark":
             spark_ctr, spark_host = get_spark_container(client)

From 9904ac1adad689a35fdde7a072dd458a52aa082a Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Thu, 19 Dec 2024 15:58:55 -0500
Subject: [PATCH 22/23] allow for test.env to not exist, needed for ci

---
 dagger/run_dbt_spark_tests.py | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index 5249a640b..acdd04334 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -110,19 +110,20 @@ async def test_spark(test_args):
             tst_container.with_workdir("/")
             .with_directory("/src/dbt", client.host().directory("./dbt"))
             .with_directory("/src/tests", client.host().directory("./tests"))
-            .with_file("/src/hatch.toml", client.host().file("./hatch.toml"))
-            .with_file("/src/License.md", client.host().file("./License.md"))
-            .with_file("/src/pyproject.toml", client.host().file("./pyproject.toml"))
-            .with_file("/src/README.md", client.host().file("./README.md"))
-        )
-
-        # try to copy over the .env file for local testing
-        try:
-            tst_container = tst_container.with_workdir("/").with_file(
-                "/src/test.env", client.host().file("./test.env")
+            .with_directory(
+                "/src",
+                client.host().directory(
+                    "./",
+                    include=[
+                        "pyproject.toml",
+                        "hatch.toml",
+                        "License.md",  # referenced in build metadata
+                        "README.md",  # referenced in build metadata
+                        "test.env",  # may not exist locally, does not exist in ci
+                    ],
+                ),
             )
-        except dagger.QueryError:
-            pass
+        )
 
         # install profile-specific system dependencies last since tests usually rotate through profiles
         if test_args.profile == "apache_spark":

From f65a5f68d98e9968576709a16dc5b3e7d7a2a091 Mon Sep 17 00:00:00 2001
From: Mike Alfare 
Date: Thu, 19 Dec 2024 16:05:16 -0500
Subject: [PATCH 23/23] run tests from the correct working directory

---
 dagger/run_dbt_spark_tests.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/dagger/run_dbt_spark_tests.py b/dagger/run_dbt_spark_tests.py
index acdd04334..f1efb434c 100644
--- a/dagger/run_dbt_spark_tests.py
+++ b/dagger/run_dbt_spark_tests.py
@@ -143,9 +143,13 @@ async def test_spark(test_args):
             tst_container = tst_container.with_exec(["apt-get", "install", "openjdk-17-jre", "-y"])
 
         # run the tests
-        result = await tst_container.with_exec(
-            ["hatch", "run", "pytest", "--profile", test_args.profile, test_args.test_path]
-        ).stdout()
+        result = (
+            await tst_container.with_workdir("/src")
+            .with_exec(
+                ["hatch", "run", "pytest", "--profile", test_args.profile, test_args.test_path]
+            )
+            .stdout()
+        )
 
         return result