From fbf9abe397abd11baab6dfa7a903346774fec345 Mon Sep 17 00:00:00 2001 From: Chenyu Li Date: Wed, 13 Apr 2022 09:50:15 -0600 Subject: [PATCH] catch table or view not exist error in spark 3.0+ (#331) --- .bumpversion.cfg | 2 +- .github/workflows/main.yml | 13 +++++++++++++ CHANGELOG.md | 2 +- dbt/adapters/spark/__version__.py | 2 +- dbt/adapters/spark/impl.py | 2 +- setup.py | 2 +- 6 files changed, 18 insertions(+), 5 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 9a0c41a56..744284849 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.1.0b1 +current_version = 1.2.0a1 parse = (?P<major>\d+) \.(?P<minor>\d+) \.(?P<patch>\d+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 60a0d6f60..fbdbbbaae 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -122,6 +122,9 @@ jobs: runs-on: ubuntu-latest + outputs: + is_alpha: ${{ steps.check-is-alpha.outputs.is_alpha }} + steps: - name: Check out the repository uses: actions/checkout@v2 @@ -150,6 +153,14 @@ jobs: - name: Check wheel contents run: | check-wheel-contents dist/*.whl --ignore W007,W008 + + - name: Check if this is an alpha version + id: check-is-alpha + run: | + export is_alpha=0 + if [[ "$(ls -lh dist/)" == *"a1"* ]]; then export is_alpha=1; fi + echo "::set-output name=is_alpha::$is_alpha" + - uses: actions/upload-artifact@v2 with: name: dist @@ -158,6 +169,8 @@ jobs: test-build: name: verify packages / python ${{ matrix.python-version }} / ${{ matrix.os }} + if: needs.build.outputs.is_alpha == 0 + needs: build runs-on: ${{ matrix.os }} diff --git a/CHANGELOG.md b/CHANGELOG.md index bb54c92f3..f9a094942 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,7 @@ - Use dbt.tests.adapter.basic in test suite ([#298](https://github.com/dbt-labs/dbt-spark/issues/298), [#299](https://github.com/dbt-labs/dbt-spark/pull/299)) - Make internal macros use macro dispatch to be overridable in child adapters 
([#319](https://github.com/dbt-labs/dbt-spark/issues/319), [#320](https://github.com/dbt-labs/dbt-spark/pull/320)) - Override adapter method 'run_sql_for_tests' ([#323](https://github.com/dbt-labs/dbt-spark/issues/323), [#324](https://github.com/dbt-labs/dbt-spark/pull/324)) - +- when a table or view doesn't exist, 'adapter.get_columns_in_relation' will return empty list instead of fail ([#328](https://github.com/dbt-labs/dbt-spark/pull/328)) ### Contributors - [@JCZuurmond](https://github.com/dbt-labs/dbt-spark/pull/279) ( [#279](https://github.com/dbt-labs/dbt-spark/pull/279)) diff --git a/dbt/adapters/spark/__version__.py b/dbt/adapters/spark/__version__.py index 56ec17a89..a6b977228 100644 --- a/dbt/adapters/spark/__version__.py +++ b/dbt/adapters/spark/__version__.py @@ -1 +1 @@ -version = "1.1.0b1" +version = "1.2.0a1" diff --git a/dbt/adapters/spark/impl.py b/dbt/adapters/spark/impl.py index 268417d07..eb001fbc9 100644 --- a/dbt/adapters/spark/impl.py +++ b/dbt/adapters/spark/impl.py @@ -239,7 +239,7 @@ def get_columns_in_relation(self, relation: Relation) -> List[SparkColumn]: # CDW would just return and empty list, normalizing the behavior here errmsg = getattr(e, "msg", "") if ( -f"Table or view not found: {relation}" in errmsg or +"Table or view not found" in errmsg or "NoSuchTableException" in errmsg ): pass diff --git a/setup.py b/setup.py index 2cd44491e..12ecbacde 100644 --- a/setup.py +++ b/setup.py @@ -52,7 +52,7 @@ def _get_dbt_core_version(): package_name = "dbt-spark" -package_version = "1.2.0a1" dbt_core_version = _get_dbt_core_version() description = """The Apache Spark adapter plugin for dbt"""