diff --git a/tests/functional/adapter/test_persist_test_results.py b/tests/functional/adapter/test_persist_test_results.py index a8b5fda54..760c5a8c7 100644 --- a/tests/functional/adapter/test_persist_test_results.py +++ b/tests/functional/adapter/test_persist_test_results.py @@ -1,3 +1,5 @@ +from typing import Dict + import pytest from dbt.tests.adapter.persist_test_results.basic import PersistTestResults @@ -8,11 +10,15 @@ class TestPersistTestResultsDatabricks(PersistTestResults): pass +@pytest.mark.skip("Spark handles delete differently and this is not yet accounted for.") @pytest.mark.skip_profile("spark_session", "databricks_cluster", "databricks_sql_endpoint") class TestPersistTestResultsSpark(PersistTestResults): - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "seeds": {"quote_columns": True}, - "tests": {"+schema": self.audit_schema_suffix}, - } + def delete_record(self, project, record: Dict[str, str]): + """ + Using "DELETE FROM" with Spark throws the following error: + dbt.exceptions.DbtDatabaseError: Database Error + org.apache.hive.service.cli.HiveSQLException: + Error running query: org.apache.spark.sql.AnalysisException: + DELETE is only supported with v2 tables. + """ + pass