diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..c604158 --- /dev/null +++ b/.flake8 @@ -0,0 +1,12 @@ +[flake8] +select = + E + W + F +ignore = + W503 # makes Flake8 work like black + W504 + E203 # makes Flake8 work like black + E741 + E501 +exclude = tests \ No newline at end of file diff --git a/.github/workflows/.gitkeep b/.github/workflows/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml new file mode 100644 index 0000000..e69de29 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..e69de29 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9fe17bc --- /dev/null +++ b/.gitignore @@ -0,0 +1,129 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..1f88c70 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,53 @@ +# For more on configuring pre-commit hooks (see https://pre-commit.com/) + +# TODO: remove global exclusion of tests when testing overhaul is complete +exclude: '^tests/.*' + +default_language_version: + python: python3.8 + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + - id: check-yaml + args: [--unsafe] + - id: check-json + - id: 
end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict +- repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + args: + - "--line-length=99" + - "--target-version=py38" + - id: black + alias: black-check + stages: [manual] + args: + - "--line-length=99" + - "--target-version=py38" + - "--check" + - "--diff" +- repo: https://gitlab.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + - id: flake8 + alias: flake8-check + stages: [manual] +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.782 + hooks: + - id: mypy + args: [--show-error-codes, --ignore-missing-imports] + files: ^dbt/adapters/.* + language: system + - id: mypy + alias: mypy-check + stages: [manual] + args: [--show-error-codes, --pretty, --ignore-missing-imports] + files: ^dbt/adapters + language: system \ No newline at end of file diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..78412d5 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1 @@ +recursive-include dbt/include *.sql *.yml *.md \ No newline at end of file diff --git a/dbt/adapters/testadapter/__init__.py b/dbt/adapters/testadapter/__init__.py new file mode 100644 index 0000000..3c0daf2 --- /dev/null +++ b/dbt/adapters/testadapter/__init__.py @@ -0,0 +1,12 @@ +from dbt.adapters.testadapter.connections import TestAdapterConnectionManager +from dbt.adapters.testadapter.connections import TestAdapterCredentials +from dbt.adapters.testadapter.impl import TestAdapterAdapter + +from dbt.adapters.base import AdapterPlugin +from dbt.include import testadapter + + +Plugin = AdapterPlugin( + adapter=TestAdapterAdapter, + credentials=TestAdapterCredentials, + include_path=testadapter.PACKAGE_PATH) \ No newline at end of file diff --git a/dbt/adapters/testadapter/__version__.py b/dbt/adapters/testadapter/__version__.py new file mode 100644 index 0000000..1e83699 --- /dev/null +++ b/dbt/adapters/testadapter/__version__.py @@ -0,0 +1 @@ +version = '1.0.0' \ No newline at end of file diff 
--git a/dbt/adapters/testadapter/connections.py b/dbt/adapters/testadapter/connections.py new file mode 100644 index 0000000..86825d6 --- /dev/null +++ b/dbt/adapters/testadapter/connections.py @@ -0,0 +1,121 @@ +from contextlib import contextmanager +from dataclasses import dataclass +import dbt.exceptions +from dbt.adapters.base import Credentials +from dbt.adapters.sql import SQLConnectionManager +from dbt.logger import GLOBAL_LOGGER as logger + +@dataclass +class TestAdapterCredentials(Credentials): + ''' + Defines database specific credentials that get added to + profiles.yml to connect to new adapter + ''' + + # Add credentials members here, like: + # host: str + # port: int + # username: str + # password: str + + _ALIASES = { + 'dbname':'database', + 'pass':'password', + 'user':'username' + } + + @property + def type(self): + '''Return name of adapter.''' + return 'testadapter' + + @property + def unique_field(self): + """ + Hashed and included in anonymous telemetry to track adapter adoption. + Pick a field that can uniquely identify one team/organization building with this adapter + """ + return self.host + + def _connection_keys(self): + """ + List of keys to display in the `dbt debug` output. + """ + return ('host','port','username','user') + +class TestAdapterConnectionManager(SQLConnectionManager): + TYPE = 'testadapter' + + + @contextmanager + def exception_handler(self, sql: str): + ''' + Returns a context manager, that will handle exceptions raised + from queries, catch, log, and raise dbt exceptions it knows how to handle. 
+ ''' + # ## Example ## + # try: + # yield + # except myadapter_library.DatabaseError as exc: + # self.release(connection_name) + + # logger.debug('myadapter error: {}'.format(str(e))) + # raise dbt.exceptions.DatabaseException(str(exc)) + # except Exception as exc: + # logger.debug("Error running SQL: {}".format(sql)) + # logger.debug("Rolling back transaction.") + # self.release(connection_name) + # raise dbt.exceptions.RuntimeException(str(exc)) + pass + + @classmethod + def open(cls, connection): + ''' + Receives a connection object and a Credentials object + and moves it to the 'open' state. + ''' + # ## Example ## + # if connection.state == 'open': + # logger.debug('Connection is already open, skipping open.') + # return connection + + # credentials = connection.credentials + + # try: + # handle = myadapter_library.connect( + # host=credentials.host, + # port=credentials.port, + # username=credentials.username, + # password=credentials.password, + # catalog=credentials.database + # ) + # connection.state = 'open' + # connection.handle = handle + # return connection + pass + + @classmethod + def get_response(cls,cursor): + ''' + Gets a cursor object and returns adapter-specific information + about the last executed command, generally an AdapterResponse object + that has items such as code, rows_affected, etc. Can also just be a string ex. 'OK' + if your cursor does not offer rich metadata. + ''' + # ## Example ## + # return cursor.status_message + pass + + def cancel(self, connection): + ''' + Gets a connection object and attempts to cancel any ongoing queries. 
+ ''' + # ## Example ## + # tid = connection.handle.transaction_id() + # sql = 'select cancel_transaction({})'.format(tid) + # logger.debug("Cancelling query '{}' ({})".format(connection_name, pid)) + # _, cursor = self.add_query(sql, 'master') + # res = cursor.fetchone() + # logger.debug("Canceled query '{}': {}".format(connection_name, res)) + pass + \ No newline at end of file diff --git a/dbt/adapters/testadapter/impl.py b/dbt/adapters/testadapter/impl.py new file mode 100644 index 0000000..c0e306e --- /dev/null +++ b/dbt/adapters/testadapter/impl.py @@ -0,0 +1,21 @@ +# adapter_src, and adapter_cls comes from kwargs in create.py +from dbt.adapters.sql import SQLAdapter +from dbt.adapters.testadapter import TestAdapterConnectionManager + + + +class TestAdapterAdapter(SQLAdapter): + ''' + Controls actual implementation of adapter, and ability to override certain methods. + ''' + + ConnectionManager = TestAdapterConnectionManager + + @classmethod + def date_function(cls): + ''' + Returns canonical date func + ''' + return 'datenow()' + + # may require more build out to make more user friendly to confer with team and community. 
diff --git a/dbt/include/testadapter/__init__.py b/dbt/include/testadapter/__init__.py new file mode 100644 index 0000000..dcca2d2 --- /dev/null +++ b/dbt/include/testadapter/__init__.py @@ -0,0 +1,2 @@ +import os +PACKAGE_PATH = os.path.dirname(__file__) \ No newline at end of file diff --git a/dbt/include/testadapter/dbt_project.yml b/dbt/include/testadapter/dbt_project.yml new file mode 100644 index 0000000..c0a8086 --- /dev/null +++ b/dbt/include/testadapter/dbt_project.yml @@ -0,0 +1,5 @@ +name: dbt_testadapter +version: 1.0.0 +config-version: 2 + +macro-paths: ["macros"] diff --git a/dbt/include/testadapter/macros/adapters.sql b/dbt/include/testadapter/macros/adapters.sql new file mode 100644 index 0000000..1f754ec --- /dev/null +++ b/dbt/include/testadapter/macros/adapters.sql @@ -0,0 +1,180 @@ +/* For examples of how to fill out the macros please refer to the postgres adapter and docs +postgres adapter macros: https://github.com/dbt-labs/dbt-core/blob/main/plugins/postgres/dbt/include/postgres/macros/adapters.sql +dbt docs: https://docs.getdbt.com/docs/contributing/building-a-new-adapter +*/ + +{% macro testadapter__alter_column_type(relation,column_name,new_column_type) -%} +'''Changes column name or data type''' +/* + 1. Create a new column (w/ temp name and correct type) + 2. Copy data over to it + 3. Drop the existing column (cascade!) + 4. Rename the new column to existing column +*/ +{% endmacro %} + +{% macro testadapter__check_schema_exists(information_schema,schema) -%} +'''Checks if schema name exists and returns number or times it shows up.''' +/* + 1. Check if schemas exist + 2. return number of rows or columns that match searched parameter +*/ +{% endmacro %} + +-- Example from postgres adapter in dbt-core +-- Notice how you can build out other methods than the designated ones for the impl.py file, +-- to make a more robust adapter. ex. 
(verify_database) + +/* + + {% macro postgres__create_schema(relation) -%} + {% if relation.database -%} + {{ adapter.verify_database(relation.database) }} + {%- endif -%} {%- call statement('create_schema') -%} + create schema if not exists {{ relation.without_identifier().include(database=False) }} + {%- endcall -%} + {% endmacro %} + +*/ + +{% macro testadapter__create_schema(relation) -%} +'''Creates a new schema in the target database, if schema already exists, method is a no-op. ''' +{% endmacro %} + +/* + +{% macro postgres__drop_schema(relation) -%} + {% if relation.database -%} + {{ adapter.verify_database(relation.database) }} + {%- endif -%} + {%- call statement('drop_schema') -%} + drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade + {%- endcall -%} +{% endmacro %} + +*/ + +{% macro testadapter__drop_relation(relation) -%} +'''Deletes relatonship identifer between tables.''' +/* + 1. If database exists + 2. Create a new schema if passed schema does not exist already +*/ +{% endmacro %} + +{% macro testadapter__drop_schema(relation) -%} +'''drops a schema in a target database.''' +/* + 1. If database exists + 2. 
search all calls of schema, and change include value to False, cascade it to backtrack +*/ +{% endmacro %} + +/* + + Example of 1 of 3 required macros that does not have a default implementation +{% macro postgres__get_columns_in_relation(relation) -%} + {% call statement('get_columns_in_relation', fetch_result=True) %} + select + column_name, + data_type, + character_maximum_length, + numeric_precision, + numeric_scale + + from {{ relation.information_schema('columns') }} + where table_name = '{{ relation.identifier }}' + {% if relation.schema %} + and table_schema = '{{ relation.schema }}' + {% endif %} + order by ordinal_position + + {% endcall %} + {% set table = load_result('get_columns_in_relation').table %} + {{ return(sql_convert_columns_in_relation(table)) }} +{% endmacro %} +*/ + + +{% macro testadapter__get_columns_in_relation(relation) -%} +'''Returns a list of Columns in a table.''' +/* + 1. select as many values from column as needed + 2. search relations to columns + 3. where table name is equal to the relation identifier + 4. if a relation schema exists and table schema is equal to the relation schema + 5. order in whatever way you want to call. + 6. create a table by loading result from call + 7. 
return new table +*/ +{% endmacro %} + +-- Example of 2 of 3 required macros that do not come with a default implementation + +/* + +{% macro postgres__list_relations_without_caching(schema_relation) %} + {% call statement('list_relations_without_caching', fetch_result=True) -%} + select + '{{ schema_relation.database }}' as database, + tablename as name, + schemaname as schema, + 'table' as type + from pg_tables + where schemaname ilike '{{ schema_relation.schema }}' + union all + select + '{{ schema_relation.database }}' as database, + viewname as name, + schemaname as schema, + 'view' as type + from pg_views + where schemaname ilike '{{ schema_relation.schema }}' + {% endcall %} + {{ return(load_result('list_relations_without_caching').table) }} +{% endmacro %} + +*/ + +{% macro testadapter__list_relations_without_caching(schema_relation) -%} +'''creates a table of relations withough using local caching.''' +{% endmacro %} + +{% macro testadapter__list_schemas(database) -%} +'''Returns a table of unique schemas.''' +/* + 1. search schemea by specific name + 2. create a table with names +*/ +{% endmacro %} + +{% macro testadapter__rename_relation(from_relation, to_relation) -%} +'''Renames a relation in the database.''' +/* + 1. Search for a specific relation name + 2. alter table by targeting specific name and passing in new name +*/ +{% endmacro %} + +{% macro testadapter__truncate_relation(relation) -%} +'''Removes all rows from a targeted set of tables.''' +/* + 1. grab all tables tied to the relation + 2. remove rows from relations +*/ +{% endmacro %} + +/* + +Example 3 of 3 of required macros that does not have a default implementation. + ** Good example of building out small methods ** please refer to impl.py for implementation of now() in postgres plugin +{% macro postgres__current_timestamp() -%} + now() +{%- endmacro %} + +*/ + +{% macro testadapter__current_timestamp() -%} +'''Returns current UTC time''' +{# docs show not to be implemented currently. 
#} +{% endmacro %} \ No newline at end of file diff --git a/dbt/include/testadapter/macros/catalog.sql b/dbt/include/testadapter/macros/catalog.sql new file mode 100644 index 0000000..946ff64 --- /dev/null +++ b/dbt/include/testadapter/macros/catalog.sql @@ -0,0 +1,36 @@ + {% macro testadapter__get_catalog(information_schema, schemas) -%} + + {%- set msg -%} + get_catalog not implemented for testadapter + {%- endset %} + /* + Your database likely has a way of accessing metadata about its objects, + whether by querying an information schema or by running `show` and `describe` commands. + dbt will use this macro to generate its catalog of objects it knows about. The catalog is one of + the artifacts powering the documentation site. + As an example, below is a simplified version of postgres__get_catalog + */ + + /* + + select {{database}} as TABLE, + "- set table type -" + when 'v' then 'VIEW' + else 'BASE TABLE' + "- set table/view names and descriptions -" + + use several joins and search types for pulling info together, sorting etc.. 
+ + where ( + search if schema exists, else build + + {%- for schema in schemas -%} + upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%} + {%- endfor -%} + + ) + define any shortcut keys + + */ + {{ exceptions.raise_compiler_error(msg) }} + {% endmacro %} \ No newline at end of file diff --git a/dbt/include/testadapter/profile_template.yml b/dbt/include/testadapter/profile_template.yml new file mode 100644 index 0000000..33ea6cb --- /dev/null +++ b/dbt/include/testadapter/profile_template.yml @@ -0,0 +1,19 @@ +fixed: + type: testadapter +prompts: + host: + hint: 'your host name' + port: + default: 5432 + type: 'int' + user: + hint: 'dev username' + password: + hint: 'dev password' + hide_input: true + dbname: + hint: 'default database' + threads: + hint: '1 or more' + type: 'int' + default: 1 diff --git a/dev_requirements.txt b/dev_requirements.txt new file mode 100644 index 0000000..de84763 --- /dev/null +++ b/dev_requirements.txt @@ -0,0 +1,22 @@ +# install latest changes in dbt-core +git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core +git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-tests-adapter&subdirectory=tests/adapter + +black==22.3.0 +bumpversion +flake8 +flaky +freezegun==0.3.12 +ipdb +mypy==0.782 +pip-tools +pre-commit +pytest +pytest-dotenv +pytest-logbook +pytest-csv +pytest-xdist +pytz +tox>=3.13 +twine +wheel \ No newline at end of file diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..60eb6cc --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +mypy_path = ./third-party-stubs +namespace_packages = True \ No newline at end of file diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..919c463 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,10 @@ +[pytest] +filterwarnings = + ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning + ignore:unclosed file .*:ResourceWarning +env_files = + test.env +testpaths = + tests/unit + 
tests/integration + tests/functional \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..06529bf --- /dev/null +++ b/setup.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +from setuptools import find_namespace_packages, setup + +package_name = "dbt-testadapter" +# make sure this always matches dbt/adapters/{adapter}/__version__.py +package_version = "1.0.0" +description = """The testadapter adapter plugin for dbt""" + +setup( + name=package_name, + version=package_version, + description=description, + long_description=description, + author="", + author_email="", + url="", + packages=find_namespace_packages(include=['dbt', 'dbt.*']), + include_package_data=True, + install_requires=[ + 'dbt-core~=1.0.0', + ] +) \ No newline at end of file diff --git a/test.env b/test.env new file mode 100644 index 0000000..dd31eee --- /dev/null +++ b/test.env @@ -0,0 +1 @@ +# Use this file to give a break down of what users would need in a .env file for connection purposes to test against. 
\ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..deae211 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,13 @@ +import pytest +import os +import json + +# Import the functional fixtures as a plugin +# Note: fixtures with session scope need to be local + +pytest_plugins = ["dbt.tests.fixtures.project"] + +# The profile dictionary, used to write out profiles.yml +@pytest.fixture(scope="class") +def dbt_profile_target(): + pass \ No newline at end of file diff --git a/tests/functional/adapter/test_basic.py b/tests/functional/adapter/test_basic.py new file mode 100644 index 0000000..256ac3c --- /dev/null +++ b/tests/functional/adapter/test_basic.py @@ -0,0 +1,53 @@ +import pytest + +from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations +from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests +from dbt.tests.adapter.basic.test_singular_tests_ephemeral import ( + BaseSingularTestsEphemeral +) +from dbt.tests.adapter.basic.test_empty import BaseEmpty +from dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral +from dbt.tests.adapter.basic.test_incremental import BaseIncremental +from dbt.tests.adapter.basic.test_generic_tests import BaseGenericTests +from dbt.tests.adapter.basic.test_snapshot_check_cols import BaseSnapshotCheckCols +from dbt.tests.adapter.basic.test_snapshot_timestamp import BaseSnapshotTimestamp +from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod + + +class TestSimpleMaterializationsTestAdapter(BaseSimpleMaterializations): + pass + + +class TestSingularTestsTestAdapter(BaseSingularTests): + pass + + +class TestSingularTestsEphemeralTestAdapter(BaseSingularTestsEphemeral): + pass + + +class TestEmptyTestAdapter(BaseEmpty): + pass + + +class TestEphemeralTestAdapter(BaseEphemeral): + pass + + +class 
TestIncrementalTestAdapter(BaseIncremental): + pass + + +class TestGenericTestsTestAdapter(BaseGenericTests): + pass + + +class TestSnapshotCheckColsTestAdapter(BaseSnapshotCheckCols): + pass + + +class TestSnapshotTimestampTestAdapter(BaseSnapshotTimestamp): + pass + +class TestBaseAdapterMethodTestAdapter(BaseAdapterMethod): + pass \ No newline at end of file diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..fb8238b --- /dev/null +++ b/tox.ini @@ -0,0 +1,24 @@ +[tox] +skipsdist = True +envlist = py37,py38,py39 + +[testenv:{unit,py37,py38,py39,py}] +description = unit testing +skip_install = True +passenv = DBT_* PYTEST_ADOPTS +commands = {envpython} -m pytest {posargs} tests/unit +deps = + -rdev_requirements.txt + -e. + + +[testenv:{integration,py37,py38,py39,py}-{ testadapter }] +description = adapter plugin integration testing +skip_install = true +passenv = DBT_* TESTADAPTER_TEST_* PYTEST_ADOPTS +commands = + testadapter: {envpython} -m pytest -m profile_testadapter {posargs:tests/integration} + testadapter: {envpython} -m pytest {posargs} tests/functional +deps = + -rdev_requirements.txt + -e.