diff --git a/.changes/unreleased/Fixes-20241023-105407.yaml b/.changes/unreleased/Fixes-20241023-105407.yaml
new file mode 100644
index 000000000..9c8c9b554
--- /dev/null
+++ b/.changes/unreleased/Fixes-20241023-105407.yaml
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Pin build dependencies to final releases
+time: 2024-10-23T10:54:07.142933-04:00
+custom:
+  Author: mikealfare
+  Issue: "1129"
diff --git a/setup.py b/setup.py
index 9ad9bfa38..5d9d882e0 100644
--- a/setup.py
+++ b/setup.py
@@ -65,10 +65,10 @@ def _get_plugin_version_dict():
     include_package_data=True,
     install_requires=[
         "sqlparams>=3.0.0",
-        "dbt-common>=0.1.0a1,<2.0",
-        "dbt-adapters>=0.1.0a1,<2.0",
+        "dbt-common>=0.1.0,<2.0",
+        "dbt-adapters>=0.1.0,<2.0",
         # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency
-        "dbt-core>=1.8.0a1",
+        "dbt-core>=1.8.0",
     ],
     extras_require={
         "ODBC": odbc_extras,
diff --git a/tests/functional/adapter/test_python_model.py b/tests/functional/adapter/test_python_model.py
index 05e25c5f4..70a74e0be 100644
--- a/tests/functional/adapter/test_python_model.py
+++ b/tests/functional/adapter/test_python_model.py
@@ -5,7 +5,12 @@
     BasePythonModelTests,
     BasePythonIncrementalTests,
 )
-from dbt.tests.adapter.python_model.test_spark import BasePySparkTests
+from dbt.tests.adapter.python_model.test_spark import (
+    BasePySparkTests,
+    PANDAS_MODEL,
+    PANDAS_ON_SPARK_MODEL,
+    PYSPARK_MODEL,
+)


 @pytest.mark.skip_profile("apache_spark", "spark_session", "databricks_sql_endpoint")
@@ -15,7 +20,13 @@ class TestPythonModelSpark(BasePythonModelTests):

 @pytest.mark.skip_profile("apache_spark", "spark_session", "databricks_sql_endpoint")
 class TestPySpark(BasePySparkTests):
-    pass
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "pandas_df.py": PANDAS_MODEL,
+            "pyspark_df.py": PYSPARK_MODEL,
+            "pandas_on_spark_df.py": PANDAS_ON_SPARK_MODEL,
+        }


 @pytest.mark.skip_profile("apache_spark", "spark_session", "databricks_sql_endpoint")