From 2c67e3f5c7d6e3b58b697a1a07a29d71ab36cbea Mon Sep 17 00:00:00 2001 From: Kyle Wigley Date: Wed, 24 Mar 2021 21:39:24 -0400 Subject: [PATCH] update tox, update makefile, run tests natively by default, general dev workflow cleanup --- .circleci/config.yml | 157 +-- CONTRIBUTING.md | 188 ++-- Dockerfile.test | 4 +- Makefile | 83 +- azure-pipelines.yml | 12 +- core/setup.py | 2 +- dev-requirements.txt | 15 + dev_requirements.txt | 15 - docker-compose.yml | 2 - ...uirements.txt => editable-requirements.txt | 0 scripts/build-dbt.py | 934 ------------------ scripts/dtr.py | 369 ------- .../test_cli_invocation.py | 4 +- test/integration/100_rpc_test/test_rpc.py | 35 +- test/integration/base.py | 7 +- test/rpc/conftest.py | 12 +- test/setup.sh | 10 - test/setup_db.sh | 1 - tox.ini | 428 +------- 19 files changed, 297 insertions(+), 1981 deletions(-) create mode 100644 dev-requirements.txt delete mode 100644 dev_requirements.txt rename editable_requirements.txt => editable-requirements.txt (100%) delete mode 100644 scripts/build-dbt.py delete mode 100755 scripts/dtr.py delete mode 100755 test/setup.sh diff --git a/.circleci/config.yml b/.circleci/config.yml index 1d8087901a1..efb7251f3e4 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -7,7 +7,7 @@ jobs: DBT_INVOCATION_ENV: circle steps: - checkout - - run: tox -e flake8,mypy,unit-py36,unit-py38 + - run: tox -- -v build-wheels: docker: *test_only steps: @@ -28,7 +28,7 @@ jobs: - store_artifacts: path: ./dist destination: dist - integration-postgres-py36: + integration-postgres: docker: &test_and_postgres - image: fishtownanalytics/test-container:9 environment: @@ -50,113 +50,36 @@ jobs: PGPASSWORD: password PGDATABASE: postgres - run: - name: Run tests - command: tox -e integration-postgres-py36 + name: Postgres integration tests + command: tox -p -e py36-postgres,py37-postgres,py38-postgres,py39-postgres -- -v -n4 - store_artifacts: path: ./logs - integration-snowflake-py36: + integration-snowflake: docker: *test_only steps: - checkout - run: - name: Run tests - command: tox -e integration-snowflake-py36 + name: Snowflake integration tests + command: tox -p -e py36-snowflake,py37-snowflake,py38-snowflake -- -v -n4 no_output_timeout: 1h - store_artifacts: path: ./logs - integration-redshift-py36: + integration-redshift: docker: *test_only steps: - checkout - run: - name: Run tests - command: tox -e integration-redshift-py36 + name: Redshift integration tests + command: tox -p -e py36-redshift,py37-redshift,py38-redshift,py39-redshift -- -v -n4 - store_artifacts: path: ./logs - integration-bigquery-py36: + integration-bigquery: docker: *test_only steps: - checkout - run: - name: Run tests - command: tox -e integration-bigquery-py36 - - store_artifacts: - path: ./logs - integration-postgres-py38: - docker: *test_and_postgres - steps: - - checkout - - run: *setupdb - - run: - name: Run tests - command: tox -e integration-postgres-py38 - - store_artifacts: - path: ./logs - integration-snowflake-py38: - docker: *test_only - steps: - - checkout - - run: - name: Run tests - command: tox -e integration-snowflake-py38 - no_output_timeout: 1h - - store_artifacts: - path: ./logs - integration-redshift-py38: - docker: *test_only - steps: - - checkout - - run: - name: Run tests - command: tox -e integration-redshift-py38 - - store_artifacts: - path: ./logs - integration-bigquery-py38: - docker: *test_only - steps: - - checkout - - run: - name: Run tests - command: tox -e integration-bigquery-py38 - - store_artifacts: - path: ./logs - - 
integration-postgres-py39: - docker: *test_and_postgres - steps: - - checkout - - run: *setupdb - - run: - name: Run tests - command: tox -e integration-postgres-py39 - - store_artifacts: - path: ./logs - integration-snowflake-py39: - docker: *test_only - steps: - - checkout - - run: - name: Run tests - command: tox -e integration-snowflake-py39 - no_output_timeout: 1h - - store_artifacts: - path: ./logs - integration-redshift-py39: - docker: *test_only - steps: - - checkout - - run: - name: Run tests - command: tox -e integration-redshift-py39 - - store_artifacts: - path: ./logs - integration-bigquery-py39: - docker: *test_only - steps: - - checkout - - run: - name: Run tests - command: tox -e integration-bigquery-py39 + name: Bigquery integration test + command: tox -p -e py36-bigquery,py37-bigquery,py38-bigquery,py39-bigquery -- -v -n4 - store_artifacts: path: ./logs @@ -165,54 +88,22 @@ workflows: test-everything: jobs: - unit - - integration-postgres-py36: - requires: - - unit - - integration-redshift-py36: - requires: - - integration-postgres-py36 - - integration-bigquery-py36: - requires: - - integration-postgres-py36 - - integration-snowflake-py36: - requires: - - integration-postgres-py36 - - integration-postgres-py38: + - integration-postgres: requires: - unit - - integration-redshift-py38: - requires: - - integration-postgres-py38 - - integration-bigquery-py38: + - integration-redshift: requires: - - integration-postgres-py38 - - integration-snowflake-py38: - requires: - - integration-postgres-py38 - - integration-postgres-py39: - requires: - - unit - - integration-redshift-py39: + - integration-postgres + - integration-bigquery: requires: - - integration-postgres-py39 - - integration-bigquery-py39: + - integration-postgres + - integration-snowflake: requires: - - integration-postgres-py39 - # - integration-snowflake-py39: - # requires: - # - integration-postgres-py39 + - integration-postgres - build-wheels: requires: - unit - - integration-postgres-py36 - - integration-redshift-py36 - - integration-bigquery-py36 - - integration-snowflake-py36 - - integration-postgres-py38 - - integration-redshift-py38 - - integration-bigquery-py38 - - integration-snowflake-py38 - - integration-postgres-py39 - - integration-redshift-py39 - - integration-bigquery-py39 - # - integration-snowflake-py39 + - integration-postgres + - integration-redshift + - integration-bigquery + - integration-snowflake diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ed25fe4ece8..64d3e14a54c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,86 +1,86 @@ -# Contributing to dbt +# Contributing to `dbt` 1. [About this document](#about-this-document) 2. [Proposing a change](#proposing-a-change) 3. [Getting the code](#getting-the-code) 4. [Setting up an environment](#setting-up-an-environment) -5. [Running dbt in development](#running-dbt-in-development) +5. [Running `dbt` in development](#running-dbt-in-development) 6. [Testing](#testing) 7. [Submitting a Pull Request](#submitting-a-pull-request) ## About this document -This document is a guide intended for folks interested in contributing to dbt. Below, we document the process by which members of the community should create issues and submit pull requests (PRs) in this repository. It is not intended as a guide for using dbt, and it assumes a certain level of familiarity with Python concepts such as virtualenvs, `pip`, python modules, filesystems, and so on. This guide assumes you are using macOS or Linux and are comfortable with the command line. 
+This document is a guide intended for folks interested in contributing to `dbt`. Below, we document the process by which members of the community should create issues and submit pull requests (PRs) in this repository. It is not intended as a guide for using `dbt`, and it assumes a certain level of familiarity with Python concepts such as virtualenvs, `pip`, python modules, filesystems, and so on. This guide assumes you are using macOS or Linux and are comfortable with the command line. -If you're new to python development or contributing to open-source software, we encourage you to read this document from start to finish. If you get stuck, drop us a line in the #development channel on [slack](community.getdbt.com). +If you're new to python development or contributing to open-source software, we encourage you to read this document from start to finish. If you get stuck, drop us a line in the `#dbt-core-development` channel on [slack](https://community.getdbt.com). ### Signing the CLA -Please note that all contributors to dbt must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements) to have their Pull Request merged into the dbt codebase. If you are unable to sign the CLA, then the dbt maintainers will unfortunately be unable to merge your Pull Request. You are, however, welcome to open issues and comment on existing ones. +Please note that all contributors to `dbt` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements) to have their Pull Request merged into the `dbt` codebase. If you are unable to sign the CLA, then the `dbt` maintainers will unfortunately be unable to merge your Pull Request. You are, however, welcome to open issues and comment on existing ones. ## Proposing a change -dbt is Apache 2.0-licensed open source software. dbt is what it is today because community members like you have opened issues, provided feedback, and contributed to the knowledge loop for the entire communtiy. Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project. +`dbt` is Apache 2.0-licensed open source software. `dbt` is what it is today because community members like you have opened issues, provided feedback, and contributed to the knowledge loop for the entire communtiy. Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project. ### Defining the problem -If you have an idea for a new feature or if you've discovered a bug in dbt, the first step is to open an issue. Please check the list of [open issues](https://github.com/fishtown-analytics/dbt/issues) before creating a new one. If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The dbt maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs. +If you have an idea for a new feature or if you've discovered a bug in `dbt`, the first step is to open an issue. Please check the list of [open issues](https://github.com/fishtown-analytics/dbt/issues) before creating a new one. 
If you find a relevant issue, please add a comment to the open issue instead of creating a new one. There are hundreds of open issues in this repository and it can be hard to know where to look for a relevant open issue. **The `dbt` maintainers are always happy to point contributors in the right direction**, so please err on the side of documenting your idea in a new issue if you are unsure where a problem statement belongs. -**Note:** All community-contributed Pull Requests _must_ be associated with an open issue. If you submit a Pull Request that does not pertain to an open issue, you will be asked to create an issue describing the problem before the Pull Request can be reviewed. +> **Note:** All community-contributed Pull Requests _must_ be associated with an open issue. If you submit a Pull Request that does not pertain to an open issue, you will be asked to create an issue describing the problem before the Pull Request can be reviewed. ### Discussing the idea -After you open an issue, a dbt maintainer will follow up by commenting on your issue (usually within 1-3 days) to explore your idea further and advise on how to implement the suggested changes. In many cases, community members will chime in with their own thoughts on the problem statement. If you as the issue creator are interested in submitting a Pull Request to address the issue, you should indicate this in the body of the issue. The dbt maintainers are _always_ happy to help contributors with the implementation of fixes and features, so please also indicate if there's anything you're unsure about or could use guidance around in the issue. +After you open an issue, a `dbt` maintainer will follow up by commenting on your issue (usually within 1-3 days) to explore your idea further and advise on how to implement the suggested changes. In many cases, community members will chime in with their own thoughts on the problem statement. If you as the issue creator are interested in submitting a Pull Request to address the issue, you should indicate this in the body of the issue. The `dbt` maintainers are _always_ happy to help contributors with the implementation of fixes and features, so please also indicate if there's anything you're unsure about or could use guidance around in the issue. ### Submitting a change -If an issue is appropriately well scoped and describes a beneficial change to the dbt codebase, then anyone may submit a Pull Request to implement the functionality described in the issue. See the sections below on how to do this. +If an issue is appropriately well scoped and describes a beneficial change to the `dbt` codebase, then anyone may submit a Pull Request to implement the functionality described in the issue. See the sections below on how to do this. -The dbt maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. You can see the list of currently open issues on the [Contribute](https://github.com/fishtown-analytics/dbt/contribute) page. +The `dbt` maintainers will add a `good first issue` label if an issue is suitable for a first-time contributor. This label often means that the required code change is small, limited to one database adapter, or a net-new addition that does not impact existing functionality. 
You can see the list of currently open issues on the [Contribute](https://github.com/fishtown-analytics/dbt/contribute) page. Here's a good workflow: - Comment on the open issue, expressing your interest in contributing the required code change - Outline your planned implementation. If you want help getting started, ask! -- Follow the steps outlined below to develop locally. Once you have opened a PR, one of the dbt maintainers will work with you to review your code. -- Add a test! Tests are crucial for both fixes and new features alike. We want to make sure that code works as intended, and that it avoids any bugs previously encountered. Currently, the best resource for understanding dbt's [unit](test/unit) and [integration](test/integration) tests is the tests themselves. One of the maintainers can help by pointing out relevant examples. +- Follow the steps outlined below to develop locally. Once you have opened a PR, one of the `dbt` maintainers will work with you to review your code. +- Add a test! Tests are crucial for both fixes and new features alike. We want to make sure that code works as intended, and that it avoids any bugs previously encountered. Currently, the best resource for understanding `dbt`'s [unit](test/unit) and [integration](test/integration) tests is the tests themselves. One of the maintainers can help by pointing out relevant examples. -In some cases, the right resolution to an open issue might be tangential to the dbt codebase. The right path forward might be a documentation update or a change that can be made in user-space. In other cases, the issue might describe functionality that the dbt maintainers are unwilling or unable to incorporate into the dbt codebase. When it is determined that an open issue describes functionality that will not translate to a code change in the dbt repository, the issue will be tagged with the `wontfix` label (see below) and closed. +In some cases, the right resolution to an open issue might be tangential to the `dbt` codebase. The right path forward might be a documentation update or a change that can be made in user-space. In other cases, the issue might describe functionality that the `dbt` maintainers are unwilling or unable to incorporate into the `dbt` codebase. When it is determined that an open issue describes functionality that will not translate to a code change in the `dbt` repository, the issue will be tagged with the `wontfix` label (see below) and closed. ### Using issue labels -The dbt maintainers use labels to categorize open issues. Some labels indicate the databases impacted by the issue, while others describe the domain in the dbt codebase germane to the discussion. While most of these labels are self-explanatory (eg. `snowflake` or `bigquery`), there are others that are worth describing. +The `dbt` maintainers use labels to categorize open issues. Some labels indicate the databases impacted by the issue, while others describe the domain in the `dbt` codebase germane to the discussion. While most of these labels are self-explanatory (eg. `snowflake` or `bigquery`), there are others that are worth describing. | tag | description | | --- | ----------- | -| [triage](https://github.com/fishtown-analytics/dbt/labels/triage) | This is a new issue which has not yet been reviewed by a dbt maintainer. This label is removed when a maintainer reviews and responds to the issue. 
| -| [bug](https://github.com/fishtown-analytics/dbt/labels/bug) | This issue represents a defect or regression in dbt | -| [enhancement](https://github.com/fishtown-analytics/dbt/labels/enhancement) | This issue represents net-new functionality in dbt | -| [good first issue](https://github.com/fishtown-analytics/dbt/labels/good%20first%20issue) | This issue does not require deep knowledge of the dbt codebase to implement. This issue is appropriate for a first-time contributor. | -| [help wanted](https://github.com/fishtown-analytics/dbt/labels/help%20wanted) / [discussion](https://github.com/fishtown-analytics/dbt/labels/discussion) | Conversation around this issue in ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. | -| [duplicate](https://github.com/fishtown-analytics/dbt/issues/duplicate) | This issue is functionally identical to another open issue. The dbt maintainers will close this issue and encourage community members to focus conversation on the other one. | -| [snoozed](https://github.com/fishtown-analytics/dbt/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The dbt maintainers will revist these issues periodically and re-prioritize them accordingly. | -| [stale](https://github.com/fishtown-analytics/dbt/labels/stale) | This is an old issue which has not recently been updated. Stale issues will periodically be closed by dbt maintainers, but they can be re-opened if the discussion is restarted. | -| [wontfix](https://github.com/fishtown-analytics/dbt/labels/wontfix) | This issue does not require a code change in the dbt repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. | +| [triage](https://github.com/fishtown-analytics/dbt/labels/triage) | This is a new issue which has not yet been reviewed by a `dbt` maintainer. This label is removed when a maintainer reviews and responds to the issue. | +| [bug](https://github.com/fishtown-analytics/dbt/labels/bug) | This issue represents a defect or regression in `dbt` | +| [enhancement](https://github.com/fishtown-analytics/dbt/labels/enhancement) | This issue represents net-new functionality in `dbt` | +| [good first issue](https://github.com/fishtown-analytics/dbt/labels/good%20first%20issue) | This issue does not require deep knowledge of the `dbt` codebase to implement. This issue is appropriate for a first-time contributor. | +| [help wanted](https://github.com/fishtown-analytics/dbt/labels/help%20wanted) / [discussion](https://github.com/fishtown-analytics/dbt/labels/discussion) | Conversation around this issue is ongoing, and there isn't yet a clear path forward. Input from community members is most welcome. | +| [duplicate](https://github.com/fishtown-analytics/dbt/issues/duplicate) | This issue is functionally identical to another open issue. The `dbt` maintainers will close this issue and encourage community members to focus conversation on the other one. | +| [snoozed](https://github.com/fishtown-analytics/dbt/labels/snoozed) | This issue describes a good idea, but one which will probably not be addressed in a six-month time horizon. The `dbt` maintainers will revisit these issues periodically and re-prioritize them accordingly. | +| [stale](https://github.com/fishtown-analytics/dbt/labels/stale) | This is an old issue which has not recently been updated.
Stale issues will periodically be closed by `dbt` maintainers, but they can be re-opened if the discussion is restarted. | +| [wontfix](https://github.com/fishtown-analytics/dbt/labels/wontfix) | This issue does not require a code change in the `dbt` repository, or the maintainers are unwilling/unable to merge a Pull Request which implements the behavior described in the issue. | #### Branching Strategy -dbt has three types of branches: +`dbt` has three types of branches: - **Trunks** are where active development of the next release takes place. There is one trunk named `develop` at the time of writing this, and will be the default branch of the repository. -- **Release Branches** track a specific, not yet complete release of dbt. Each minor version release has a corresponding release branch. For example, the `0.11.x` series of releases has a branch called `0.11.latest`. This allows us to release new patch versions under `0.11` without necessarily needing to pull them into the latest version of dbt. +- **Release Branches** track a specific, not yet complete release of `dbt`. Each minor version release has a corresponding release branch. For example, the `0.11.x` series of releases has a branch called `0.11.latest`. This allows us to release new patch versions under `0.11` without necessarily needing to pull them into the latest version of `dbt`. - **Feature Branches** track individual features and fixes. On completion they should be merged into the trunk brnach or a specific release branch. ## Getting the code ### Installing git -You will need `git` in order to download and modify the dbt source code. On macOS, the best way to download git is to just install [Xcode](https://developer.apple.com/support/xcode/). +You will need `git` in order to download and modify the `dbt` source code. On macOS, the best way to download git is to just install [Xcode](https://developer.apple.com/support/xcode/). ### External contributors -If you are not a member of the `fishtown-analytics` GitHub organization, you can contribute to dbt by forking the dbt repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to: +If you are not a member of the `fishtown-analytics` GitHub organization, you can contribute to `dbt` by forking the `dbt` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to: -1. fork the dbt repository +1. fork the `dbt` repository 2. clone your fork locally 3. check out a new branch for your proposed changes 4. push changes to your fork @@ -88,31 +88,30 @@ If you are not a member of the `fishtown-analytics` GitHub organization, you can ### Core contributors -If you are a member of the `fishtown-analytics` GitHub organization, you will have push access to the dbt repo. Rather than -forking dbt to make your changes, just clone the repository, check out a new branch, and push directly to that branch. +If you are a member of the `fishtown-analytics` GitHub organization, you will have push access to the `dbt` repo. Rather than forking `dbt` to make your changes, just clone the repository, check out a new branch, and push directly to that branch. ## Setting up an environment -There are some tools that will be helpful to you in developing locally. While this is the list relevant for dbt development, many of these tools are used commonly across open-source python projects. 
+There are some tools that will be helpful to you in developing locally. While this is the list relevant for `dbt` development, many of these tools are used commonly across open-source python projects. ### Tools -A short list of tools used in dbt testing that will be helpful to your understanding: +A short list of tools used in `dbt` testing that will be helpful to your understanding: -- [virtualenv](https://virtualenv.pypa.io/en/stable/) to manage dependencies -- [tox](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions -- [pytest](https://docs.pytest.org/en/latest/) to discover/run tests -- [make](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) - but don't worry too much, nobody _really_ understands how make works and our Makefile is super simple -- [flake8](https://gitlab.com/pycqa/flake8) for code linting +- [`tox`](https://tox.readthedocs.io/en/latest/) to manage virtualenvs across python versions. We currently target the latest patch releases for Python 3.6, Python 3.7, Python 3.8, and Python 3.9 +- [`pytest`](https://docs.pytest.org/en/latest/) to discover/run tests +- [`make`](https://users.cs.duke.edu/~ola/courses/programming/Makefiles/Makefiles.html) - but don't worry too much, nobody _really_ understands how make works and our Makefile is super simple +- [`flake8`](https://flake8.pycqa.org/en/latest/) for code linting +- [`mypy`](https://mypy.readthedocs.io/en/stable/) for static type checking - [CircleCI](https://circleci.com/product/) and [Azure Pipelines](https://azure.microsoft.com/en-us/services/devops/pipelines/) -A deep understanding of these tools in not required to effectively contribute to dbt, but we recommend checking out the attached documentation if you're interested in learning more about them. +A deep understanding of these tools in not required to effectively contribute to `dbt`, but we recommend checking out the attached documentation if you're interested in learning more about them. #### virtual environments -We strongly recommend using virtual environments when developing code in dbt. We recommend creating this virtualenv -in the root of the dbt repository. To create a new virtualenv, run: -``` +We strongly recommend using virtual environments when developing code in `dbt`. We recommend creating this virtualenv +in the root of the `dbt` repository. To create a new virtualenv, run: +```sh python3 -m venv env source env/bin/activate ``` @@ -128,23 +127,25 @@ Docker and docker-compose are both used in testing. Specific instructions for yo For testing, and later in the examples in this document, you may want to have `psql` available so you can poke around in the database and see what happened. We recommend that you use [homebrew](https://brew.sh/) for that on macOS, and your package manager on Linux. You can install any version of the postgres client that you'd like. On macOS, with homebrew setup, you can run: -``` +```sh brew install postgresql ``` -## Running dbt in development +## Running `dbt` in development ### Installation -First make sure that you set up your `virtualenv` as described in section _Setting up an environment_. Next, install dbt (and its dependencies) with: +First make sure that you set up your `virtualenv` as described in [Setting up an environment](#setting-up-an-environment). 
Next, install `dbt` (and its dependencies) with: -``` -pip install -r editable_requirements.txt +```sh +make dev +# or +pip install -r dev-requirements.txt -r editable-requirements.txt ``` -When dbt is installed from source in this way, any changes you make to the dbt source code will be reflected immediately in your next `dbt` run. +When `dbt` is installed this way, any changes you make to the `dbt` source code will be reflected immediately in your next `dbt` run. -### Running dbt +### Running `dbt` With your virtualenv activated, the `dbt` script should point back to the source code you've cloned on your machine. You can verify this by running `which dbt`. This command should show you a path to an executable in your virtualenv. @@ -152,76 +153,77 @@ Configure your [profile](https://docs.getdbt.com/docs/configure-your-profile) as ## Testing -Getting the dbt integration tests set up in your local environment will be very helpful as you start to make changes to your local version of dbt. The section that follows outlines some helpful tips for setting up the test environment. +Getting the `dbt` integration tests set up in your local environment will be very helpful as you start to make changes to your local version of `dbt`. The section that follows outlines some helpful tips for setting up the test environment. -### Running tests via Docker +Since `dbt` works with a number of different databases, you will need to supply credentials for one or more of these databases in your test environment. Most organizations don't have access to each of a BigQuery, Redshift, Snowflake, and Postgres database, so it's likely that you will be unable to run every integration test locally. Fortunately, Fishtown Analytics provides a CI environment with access to sandboxed Redshift, Snowflake, BigQuery, and Postgres databases. See the section on [_Submitting a Pull Request_](#submitting-a-pull-request) below for more information on this CI setup. -dbt's unit and integration tests run in Docker. Because dbt works with a number of different databases, you will need to supply credentials for one or more of these databases in your test environment. Most organizations don't have access to each of a BigQuery, Redshift, Snowflake, and Postgres database, so it's likely that you will be unable to run every integration test locally. Fortunately, Fishtown Analytics provides a CI environment with access to sandboxed Redshift, Snowflake, BigQuery, and Postgres databases. See the section on [_Submitting a Pull Request_](#submitting-a-pull-request) below for more information on this CI setup. +### Initial setup +We recommend starting with `dbt`'s Postgres tests. These tests cover most of the functionality in `dbt`, are the fastest to run, and are the easiest to set up. To run the Postgres integration tests, you'll have to do one extra step of setting up the test database: -### Specifying your test credentials +```sh +make setup-db +``` +or, alternatively: +```sh +docker-compose up -d database +PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh +``` -dbt uses test credentials specified in a `test.env` file in the root of the repository. This `test.env` file is git-ignored, but please be _extra_ careful to never check in credentials or other sensitive information when developing against dbt. 
To create your `test.env` file, copy the provided sample file, then supply your relevant credentials: +`dbt` uses test credentials specified in a `test.env` file in the root of the repository for non-Postgres databases. This `test.env` file is git-ignored, but please be _extra_ careful to never check in credentials or other sensitive information when developing against `dbt`. To create your `test.env` file, copy the provided sample file, then supply your relevant credentials. This step is only required to use non-Postgres databases. ``` cp test.env.sample test.env +$EDITOR test.env ``` -We recommend starting with dbt's Postgres tests. These tests cover most of the functionality in dbt, are the fastest to run, and are the easiest to set up. dbt's test suite runs Postgres in a Docker container, so no setup should be required to run these tests. - -If you additionally want to test Snowflake, Bigquery, or Redshift, locally you'll need to get credentials and add them to the `test.env` file. In general, it's most important to have successful unit and Postgres tests. Once you open a PR, dbt will automatically run integration tests for the other three core database adapters. Of course, if you are a BigQuery user, contributing a BigQuery-only feature, it's important to run BigQuery tests as well. +> In general, it's most important to have successful unit and Postgres tests. Once you open a PR, `dbt` will automatically run integration tests for the other three core database adapters. Of course, if you are a BigQuery user, contributing a BigQuery-only feature, it's important to run BigQuery tests as well. ### Test commands -dbt's unit tests and Python linter can be run with: +There are a few methods for running tests locally. -``` -make test-unit -``` +#### Makefile -To run the Postgres + Python 3.6 integration tests, you'll have to do one extra step of setting up the test database: +There are multiple targets in the Makefile to run common test suites and code +checks, most notably: +```sh +# Runs unit tests with py38 and code checks in parallel. +make test ``` -docker-compose up -d database -PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh - - -To run a quick test for Python3 integration tests on Postgres, you can run: +> These make targets assume you have [`tox`](https://tox.readthedocs.io/en/latest/) installed locally > unless you run them with a Docker container. See `make help` for more info. -``` -make test-quick -``` +Check out the other targets in the Makefile to see other commonly used test suites. -To run tests for a specific database, invoke `tox` directly with the required flags: -``` -# Run Postgres py36 tests -docker-compose run test tox -e integration-postgres-py36 -- -x +#### `tox` -# Run Snowflake py36 tests -docker-compose run test tox -e integration-snowflake-py36 -- -x +[`tox`](https://tox.readthedocs.io/en/latest/) takes care of managing virtualenvs and installing dependencies in order to run tests. You can also run tests in parallel; for example, you can run unit tests for Python 3.6, Python 3.7, Python 3.8, `flake8` checks, and `mypy` checks in parallel with `tox -p`. Also, you can run unit tests for specific python versions with `tox -e py36`.
The configuration for these tests is located in `tox.ini`. -# Run BigQuery py36 tests -docker-compose run test tox -e integration-bigquery-py36 -- -x +#### `pytest` -# Run Redshift py36 tests -docker-compose run test tox -e integration-redshift-py36 -- -x +Finally, you can also run a specific test or group of tests using [`pytest`](https://docs.pytest.org/en/latest/) directly. With a virtualenv active and dev dependencies installed you can do things like: +```sh +# run specific postgres integration tests +python -m pytest -m profile_postgres test/integration/001_simple_copy_test +# run all unit tests in a file +python -m pytest test/unit/test_graph.py +# run a specific unit test +python -m pytest test/unit/test_graph.py::GraphTest::test__dependency_list ``` - -To run a specific test by itself: -``` -docker-compose run test tox -e explicit-py36 -- -s -x -m profile_{adapter} {path_to_test_file_or_folder} -``` -E.g. -``` -docker-compose run test tox -e explicit-py36 -- -s -x -m profile_snowflake test/integration/001_simple_copy_test -``` - -See the `Makefile` contents for more some other examples of ways to run `tox`. +> [Here](https://docs.pytest.org/en/reorganize-docs/new-docs/user/commandlineuseful.html) is a list of useful command-line options for `pytest` to use while developing. ## Submitting a Pull Request Fishtown Analytics provides a sandboxed Redshift, Snowflake, and BigQuery database for use in a CI environment. When pull requests are submitted to the `fishtown-analytics/dbt` repo, GitHub will trigger automated tests in CircleCI and Azure Pipelines. -A dbt maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. +A `dbt` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. -Once all tests are passing and your PR has been approved, a dbt maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada: +Once all tests are passing and your PR has been approved, a `dbt` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada: diff --git a/Dockerfile.test b/Dockerfile.test index 7d114a87f5b..5771fdef30b 100644 --- a/Dockerfile.test +++ b/Dockerfile.test @@ -46,9 +46,7 @@ RUN curl -LO https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_V && tar -C /usr/local/bin -xzvf dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz \ && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz -RUN pip3 install -U "tox==3.14.4" wheel "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools -# tox fails if the 'python' interpreter (python2) doesn't have `tox` installed -RUN pip install -U "tox==3.14.4" "six>=1.14.0,<1.15.0" "virtualenv==20.0.3" setuptools +RUN pip3 install -U tox wheel six setuptools # These args are passed in via docker-compose, which reads then from the .env file. # On Linux, run `make .env` to create the .env file for the current user.
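The `test.env` file referenced in the CONTRIBUTING.md changes above is never shown in this patch, so here is a minimal illustrative sketch of what it might contain. The variable names are the ones visible in the Azure Pipelines configuration later in this diff; the authoritative list lives in `test.env.sample`, and every value below is a placeholder rather than a real credential.

```sh
# Hypothetical test.env sketch -- copy test.env.sample and fill in real values.
# Only needed for the non-Postgres adapters; the Postgres tests use the
# docker-compose database started by `make setup-db`.
SNOWFLAKE_TEST_ACCOUNT=placeholder_account
SNOWFLAKE_TEST_PASSWORD=placeholder_password
BIGQUERY_SERVICE_ACCOUNT_JSON='{"type": "service_account", "...": "..."}'
REDSHIFT_TEST_DBNAME=placeholder_dbname
REDSHIFT_TEST_PASS=placeholder_password
```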
diff --git a/Makefile b/Makefile index ced0dbfc058..8a47fb105d7 100644 --- a/Makefile +++ b/Makefile @@ -1,29 +1,63 @@ -.PHONY: install test test-unit test-integration +.DEFAULT_GOAL:=help -changed_tests := `git status --porcelain | grep '^\(M\| M\|A\| A\)' | awk '{ print $$2 }' | grep '\/test_[a-zA-Z_\-\.]\+.py'` +# Optional flag to run target in a docker container. +# (example `make test USE_DOCKER=true`) +ifeq ($(USE_DOCKER),true) + DOCKER_CMD := docker-compose run --rm test +endif + +.PHONY: dev +dev: ## Installs dbt-* packages in develop mode and development dependencies. + pip install -r dev-requirements.txt -r editable-requirements.txt + +.PHONY: mypy +mypy: .env ## Runs mypy for static type checking. + $(DOCKER_CMD) tox -e mypy + +.PHONY: flake8 +flake8: .env ## Runs flake8 to enforce style guide. + $(DOCKER_CMD) tox -e flake8 + +.PHONY: lint +lint: .env ## Runs all code checks in parallel. + $(DOCKER_CMD) tox -p -e flake8,mypy -install: - pip install -e . +.PHONY: unit +unit: .env ## Runs unit tests with py38. + $(DOCKER_CMD) tox -e py38 -test: .env - @echo "Full test run starting..." - @time docker-compose run --rm test tox +.PHONY: test +test: .env ## Runs unit tests with py38 and code checks in parallel. + $(DOCKER_CMD) tox -p -e py38,flake8,mypy -test-unit: .env - @echo "Unit test run starting..." - @time docker-compose run --rm test tox -e unit-py36,flake8 +.PHONY: integration +integration: .env ## Runs all integration tests with py38 in "fail fast" mode. + $(DOCKER_CMD) tox -p -e py38-postgres,py38-bigquery,py38-redshift,py38-snowflake -- -x -n4 -test-integration: .env - @echo "Integration test run starting..." - @time docker-compose run --rm test tox -e integration-postgres-py36,integration-redshift-py36,integration-snowflake-py36,integration-bigquery-py36 +.PHONY: integration-postgres +integration-postgres: .env ## Runs postgres integration tests with py38 in "fail fast" mode. + $(DOCKER_CMD) tox -e py38-postgres -- -x -n4 -test-quick: .env - @echo "Integration test run starting..." - @time docker-compose run --rm test tox -e integration-postgres-py36 -- -x +.PHONY: integration-redshift +integration-redshift: .env ## Runs redshift integration tests with py38 in "fail fast" mode. + $(DOCKER_CMD) tox -e py38-redshift -- -x -n4 + +.PHONY: integration-snowflake +integration-snowflake: .env ## Runs snowflake integration tests with py38 in "fail fast" mode. + $(DOCKER_CMD) tox -e py38-snowflake -- -x -n4 + +.PHONY: integration-bigquery +integration-bigquery: .env ## Runs bigquery integration tests with py38 in "fail fast" mode. + $(DOCKER_CMD) tox -e py38-bigquery -- -x -n4 + +.PHONY: setup-db +setup-db: ## Setup Postgres database with docker-compose for system testing. + docker-compose up -d database + PGHOST=localhost PGUSER=root PGPASSWORD=password PGDATABASE=postgres bash test/setup_db.sh # This rule creates a file named .env that is used by docker-compose for passing # the USER_ID and GROUP_ID arguments to the Docker image. -.env: +.env: ## Setup step for using using docker-compose with make target. @touch .env ifneq ($(OS),Windows_NT) ifneq ($(shell uname -s), Darwin) @@ -31,9 +65,9 @@ ifneq ($(shell uname -s), Darwin) @echo GROUP_ID=$(shell id -g) >> .env endif endif - @time docker-compose build -clean: +.PHONY: clean +clean: ## Resets development environment. rm -f .coverage rm -rf .eggs/ rm -f .env @@ -47,3 +81,14 @@ clean: rm -rf target/ find . -type f -name '*.pyc' -delete find . 
-type d -name '__pycache__' -depth -delete + +.PHONY: help +help: ## Show this help message. + @echo 'usage: make [target] [USE_DOCKER=true]' + @echo + @echo 'targets:' + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + @echo + @echo 'options:' + @echo 'use USE_DOCKER=true to run target in a docker container' + diff --git a/azure-pipelines.yml b/azure-pipelines.yml index de297391026..c12d124a02f 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -23,7 +23,7 @@ jobs: - script: python -m pip install --upgrade pip && pip install tox displayName: 'Install dependencies' - - script: python -m tox -e pywin-unit + - script: python -m tox -e py -- -v displayName: Run unit tests - job: PostgresIntegrationTest @@ -54,7 +54,7 @@ jobs: - script: python -m pip install --upgrade pip && pip install tox displayName: 'Install dependencies' - - script: python -m tox -e pywin-postgres + - script: python -m tox -e py-postgres -- -v displayName: Run integration tests # These three are all similar except secure environment variables, which MUST be passed along to their tasks, @@ -73,7 +73,7 @@ jobs: - script: python -m pip install --upgrade pip && pip install tox displayName: 'Install dependencies' - - script: python -m tox -e pywin-snowflake + - script: python -m tox -e py-snowflake -- -v env: SNOWFLAKE_TEST_ACCOUNT: $(SNOWFLAKE_TEST_ACCOUNT) SNOWFLAKE_TEST_PASSWORD: $(SNOWFLAKE_TEST_PASSWORD) @@ -96,7 +96,7 @@ jobs: architecture: 'x64' - script: python -m pip install --upgrade pip && pip install tox displayName: 'Install dependencies' - - script: python -m tox -e pywin-bigquery + - script: python -m tox -e py-bigquery -- -v env: BIGQUERY_SERVICE_ACCOUNT_JSON: $(BIGQUERY_SERVICE_ACCOUNT_JSON) displayName: Run integration tests @@ -115,7 +115,7 @@ jobs: - script: python -m pip install --upgrade pip && pip install tox displayName: 'Install dependencies' - - script: python -m tox -e pywin-redshift + - script: python -m tox -e py-redshift -- -v env: REDSHIFT_TEST_DBNAME: $(REDSHIFT_TEST_DBNAME) REDSHIFT_TEST_PASS: $(REDSHIFT_TEST_PASS) @@ -139,7 +139,7 @@ jobs: inputs: versionSpec: '3.7' architecture: 'x64' - - script: python -m pip install --upgrade pip setuptools && python -m pip install -r requirements.txt && python -m pip install -r dev_requirements.txt + - script: python -m pip install --upgrade pip setuptools && python -m pip install -r requirements.txt && python -m pip install -r dev-requirements.txt displayName: Install dependencies - task: ShellScript@2 inputs: diff --git a/core/setup.py b/core/setup.py index 1b7e1de3763..3cdfa7d8ff5 100644 --- a/core/setup.py +++ b/core/setup.py @@ -64,7 +64,7 @@ def read(fname): 'sqlparse>=0.2.3,<0.4', 'networkx>=2.3,<3', 'minimal-snowplow-tracker==0.0.2', - 'colorama>=0.3.9,<0.4.4', + 'colorama>=0.3.9,<0.4.5', 'agate>=1.6,<1.6.2', 'isodate>=0.6,<0.7', 'json-rpc>=1.12,<2', diff --git a/dev-requirements.txt b/dev-requirements.txt new file mode 100644 index 00000000000..7560797649b --- /dev/null +++ b/dev-requirements.txt @@ -0,0 +1,15 @@ +bumpversion +flake8 +flaky +freezegun==0.3.12 +ipdb +mypy==0.782 +pip-tools +pytest +pytest-dotenv +pytest-logbook +pytest-xdist +pytz +tox +twine +wheel diff --git a/dev_requirements.txt b/dev_requirements.txt deleted file mode 100644 index a7887c64529..00000000000 --- a/dev_requirements.txt +++ /dev/null @@ -1,15 +0,0 @@ -freezegun==0.3.12 -pytest==5.4.3 -flake8>=3.5.0 -pytz==2017.2 -bumpversion==0.5.3 -tox==3.14.4 -virtualenv==20.0.3 
-six>=1.14.0 -ipdb -pytest-xdist>=1.28.0,<2 -flaky>=3.5.3,<4 -mypy==0.782 -wheel -twine -pytest-logbook>=1.2.0,<1.3 diff --git a/docker-compose.yml b/docker-compose.yml index abda125ac85..44af7bd7ec8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,8 +18,6 @@ services: USER_ID: ${USER_ID:-} GROUP_ID: ${GROUP_ID:-} command: "/root/.virtualenvs/dbt/bin/pytest" - env_file: - - ./test.env volumes: - .:/usr/app working_dir: /usr/app diff --git a/editable_requirements.txt b/editable-requirements.txt similarity index 100% rename from editable_requirements.txt rename to editable-requirements.txt diff --git a/scripts/build-dbt.py b/scripts/build-dbt.py deleted file mode 100644 index 80cd88afec3..00000000000 --- a/scripts/build-dbt.py +++ /dev/null @@ -1,934 +0,0 @@ -import json -import os -import re -import shutil -import subprocess -import sys -import tempfile -import textwrap -import time -import venv # type: ignore -import zipfile - -from typing import Dict - -from argparse import ArgumentParser -from dataclasses import dataclass -from pathlib import Path -from urllib.request import urlopen - -from typing import Optional, Iterator, Tuple, List, Iterable - - -HOMEBREW_PYTHON = (3, 8) - - -# This should match the pattern in .bumpversion.cfg -VERSION_PATTERN = re.compile( - r'(?P\d+)\.(?P\d+)\.(?P\d+)' - r'((?P[a-z]+)(?P\d+))?' -) - - -class Version: - def __init__(self, raw: str) -> None: - self.raw = raw - match = VERSION_PATTERN.match(self.raw) - assert match is not None, f'Invalid version: {self.raw}' - groups = match.groupdict() - - self.major: int = int(groups['major']) - self.minor: int = int(groups['minor']) - self.patch: int = int(groups['patch']) - self.prerelease: Optional[str] = None - self.num: Optional[int] = None - - if groups['num'] is not None: - self.prerelease = groups['prerelease'] - self.num = int(groups['num']) - - def __str__(self): - return self.raw - - def homebrew_class_name(self) -> str: - name = f'DbtAT{self.major}{self.minor}{self.patch}' - if self.prerelease is not None and self.num is not None: - name = f'{name}{self.prerelease.title()}{self.num}' - return name - - def homebrew_filename(self): - version_str = f'{self.major}.{self.minor}.{self.patch}' - if self.prerelease is not None and self.num is not None: - version_str = f'{version_str}-{self.prerelease}{self.num}' - return f'dbt@{version_str}.rb' - - -@dataclass -class Arguments: - version: Version - part: str - path: Path - homebrew_path: Path - homebrew_set_default: bool - set_version: bool - build_pypi: bool - upload_pypi: bool - test_upload: bool - build_homebrew: bool - build_docker: bool - upload_docker: bool - write_requirements: bool - write_dockerfile: bool - - @classmethod - def parse(cls) -> 'Arguments': - parser = ArgumentParser( - prog="Bump dbt's version, build packages" - ) - parser.add_argument( - 'version', - type=Version, - help="The version to set", - ) - parser.add_argument( - 'part', - type=str, - help="The part of the version to update", - ) - parser.add_argument( - '--path', - type=Path, - help='The path to the dbt repository', - default=Path.cwd(), - ) - parser.add_argument( - '--homebrew-path', - type=Path, - help='The path to the dbt homebrew install', - default=(Path.cwd() / '../homebrew-dbt'), - ) - parser.add_argument( - '--homebrew-set-default', - action='store_true', - help='If set, make this homebrew version the default', - ) - parser.add_argument( - '--no-set-version', - dest='set_version', - action='store_false', - help='Skip bumping the version', - ) - 
parser.add_argument( - '--no-build-pypi', - dest='build_pypi', - action='store_false', - help='skip building pypi', - ) - parser.add_argument( - '--no-build-docker', - dest='build_docker', - action='store_false', - help='skip building docker images', - ) - parser.add_argument( - '--no-upload-docker', - dest='upload_docker', - action='store_false', - help='skip uploading docker images', - ) - - uploading = parser.add_mutually_exclusive_group() - - uploading.add_argument( - '--upload-pypi', - dest='force_upload_pypi', - action='store_true', - help='upload to pypi even if building is disabled' - ) - - uploading.add_argument( - '--no-upload-pypi', - dest='no_upload_pypi', - action='store_true', - help='skip uploading to pypi', - ) - - parser.add_argument( - '--no-upload', - dest='test_upload', - action='store_false', - help='Skip uploading to pypitest', - ) - - parser.add_argument( - '--no-build-homebrew', - dest='build_homebrew', - action='store_false', - help='Skip building homebrew packages', - ) - parser.add_argument( - '--no-write-requirements', - dest='write_requirements', - action='store_false', - help='Skip writing the requirements file. It must exist.' - ) - parser.add_argument( - '--no-write-dockerfile', - dest='write_dockerfile', - action='store_false', - help='Skip writing the dockerfile. It must exist.' - ) - parsed = parser.parse_args() - - upload_pypi = parsed.build_pypi - if parsed.force_upload_pypi: - upload_pypi = True - elif parsed.no_upload_pypi: - upload_pypi = False - - return cls( - version=parsed.version, - part=parsed.part, - path=parsed.path, - homebrew_path=parsed.homebrew_path, - homebrew_set_default=parsed.homebrew_set_default, - set_version=parsed.set_version, - build_pypi=parsed.build_pypi, - upload_pypi=upload_pypi, - test_upload=parsed.test_upload, - build_homebrew=parsed.build_homebrew, - build_docker=parsed.build_docker, - upload_docker=parsed.upload_docker, - write_requirements=parsed.write_requirements, - write_dockerfile=parsed.write_dockerfile, - ) - - -def collect_output(cmd, cwd=None, stderr=subprocess.PIPE) -> str: - try: - result = subprocess.run( - cmd, cwd=cwd, check=True, stdout=subprocess.PIPE, stderr=stderr - ) - except subprocess.CalledProcessError as exc: - print(f'Command {exc.cmd} failed') - if exc.output: - print(exc.output.decode('utf-8')) - if exc.stderr: - print(exc.stderr.decode('utf-8'), file=sys.stderr) - raise - return result.stdout.decode('utf-8') - - -def run_command(cmd, cwd=None) -> None: - result = collect_output(cmd, stderr=subprocess.STDOUT, cwd=cwd) - print(result) - - -def set_version(path: Path, version: Version, part: str): - # bumpversion --commit --no-tag --new-version "${version}" "${port}" - cmd = [ - 'bumpversion', '--commit', '--no-tag', '--new-version', - str(version), part - ] - print(f'bumping version to {version}') - run_command(cmd, cwd=path) - print(f'bumped version to {version}') - - -class PypiBuilder: - _SUBPACKAGES = ( - 'core', - 'plugins/postgres', - 'plugins/redshift', - 'plugins/bigquery', - 'plugins/snowflake', - ) - - def __init__(self, dbt_path: Path): - self.dbt_path = dbt_path - - @staticmethod - def _dist_for(path: Path, make=False) -> Path: - dist_path = path / 'dist' - if dist_path.exists(): - shutil.rmtree(dist_path) - if make: - os.makedirs(dist_path) - build_path = path / 'build' - if build_path.exists(): - shutil.rmtree(build_path) - return dist_path - - @staticmethod - def _build_pypi_package(path: Path): - print(f'building package in {path}') - cmd = ['python', 'setup.py', 'sdist', 
'bdist_wheel'] - run_command(cmd, cwd=path) - print(f'finished building package in {path}') - - @staticmethod - def _all_packages_in(path: Path) -> Iterator[Path]: - path = path / 'dist' - for pattern in ('*.tar.gz', '*.whl'): - yield from path.glob(pattern) - - def _build_subpackage(self, name: str) -> Iterator[Path]: - subpath = self.dbt_path / name - self._dist_for(subpath) - self._build_pypi_package(subpath) - return self._all_packages_in(subpath) - - def build(self): - print('building pypi packages') - dist_path = self._dist_for(self.dbt_path) - sub_pkgs: List[Path] = [] - for path in self._SUBPACKAGES: - sub_pkgs.extend(self._build_subpackage(path)) - - # now build the main package - self._build_pypi_package(self.dbt_path) - # now copy everything from the subpackages in - for package in sub_pkgs: - shutil.copy(str(package), dist_path) - - print('built pypi packages') - - def upload(self, *, test=True): - cmd = ['twine', 'check'] - cmd.extend(str(p) for p in self._all_packages_in(self.dbt_path)) - run_command(cmd) - cmd = ['twine', 'upload'] - if test: - cmd.extend(['--repository', 'pypitest']) - cmd.extend(str(p) for p in self._all_packages_in(self.dbt_path)) - print('uploading packages: {}'.format(' '.join(cmd))) - run_command(cmd) - print('uploaded packages') - - -class PipInstaller(venv.EnvBuilder): - def __init__(self, packages: List[str]) -> None: - super().__init__(with_pip=True) - self.packages = packages - - def post_setup(self, context): - # we can't run from the dbt directory or this gets all weird, so - # install from an empty temp directory and then remove it. - tmp = tempfile.mkdtemp() - cmd = [context.env_exe, '-m', 'pip', 'install', '--upgrade'] - cmd.extend(self.packages) - print(f'installing {self.packages}') - try: - run_command(cmd, cwd=tmp) - finally: - os.rmdir(tmp) - print(f'finished installing {self.packages}') - - def create(self, venv_path): - os.makedirs(venv_path.parent, exist_ok=True) - if venv_path.exists(): - shutil.rmtree(venv_path) - return super().create(venv_path) - - -def _require_wheels(dbt_path: Path) -> List[Path]: - dist_path = dbt_path / 'dist' - wheels = list(dist_path.glob('*.whl')) - if not wheels: - raise ValueError( - f'No wheels found in {dist_path} - run scripts/build-wheels.sh' - ) - return wheels - - -class DistFolderEnv(PipInstaller): - def __init__(self, dbt_path: Path) -> None: - self.wheels = _require_wheels(dbt_path) - super().__init__(packages=self.wheels) - - -class HomebrewVirtualenv(PipInstaller): - def __init__(self, dbt_version: Version) -> None: - super().__init__([f'dbt=={dbt_version}']) - - -@dataclass -class HomebrewDependency: - name: str - url: str - sha256: str - version: str - - def render(self, indent: int = 2) -> str: - result = textwrap.dedent(f'''\ - resource "{self.name}" do # {self.name}=={self.version} - url "{self.url}" - sha256 "{self.sha256}" - end - ''') - return textwrap.indent(result, ' '*indent) - - def __str__(self) -> str: - return self.render(indent=0) - - -@dataclass -class HomebrewTemplate: - url_data: str - hash_data: str - dependencies: List[HomebrewDependency] - - -def _make_venv_at(root: Path, name: str, builder: venv.EnvBuilder): - venv_path = root / name - os.makedirs(root, exist_ok=True) - if venv_path.exists(): - shutil.rmtree(venv_path) - - builder.create(venv_path) - return venv_path - - -class HomebrewBuilder: - def __init__( - self, - dbt_path: Path, - version: Version, - homebrew_path: Path, - set_default: bool, - ) -> None: - self.dbt_path = dbt_path - self.version = version - 
self.homebrew_path = homebrew_path - self.set_default = set_default - self._template: Optional[HomebrewTemplate] = None - - def make_venv(self) -> HomebrewVirtualenv: - env = HomebrewVirtualenv(self.version) - max_attempts = 10 - for attempt in range(1, max_attempts+1): - # after uploading to pypi, it can take a few minutes for installing - # to work. Retry a few times... - try: - env.create(self.homebrew_venv_path) - return - except subprocess.CalledProcessError: - if attempt == max_attempts: - raise - else: - print( - f'installation failed - waiting 60s for pypi to see ' - f'the new version (attempt {attempt}/{max_attempts})' - ) - time.sleep(60) - - return env - - @property - def versioned_formula_path(self) -> Path: - return ( - self.homebrew_path / 'Formula' / self.version.homebrew_filename() - ) - - @property - def default_formula_path(self) -> Path: - return ( - self.homebrew_path / 'Formula/dbt.rb' - ) - - @property - def homebrew_venv_path(self) -> Path: - return self.dbt_path / 'build' / 'homebrew-venv' - - @staticmethod - def _dbt_homebrew_formula_fmt() -> str: - return textwrap.dedent('''\ - class {formula_name} < Formula - include Language::Python::Virtualenv - - desc "Data build tool" - homepage "https://github.com/fishtown-analytics/dbt" - url "{url_data}" - sha256 "{hash_data}" - revision 1 - - bottle do - root_url "http://bottles.getdbt.com" - # bottle hashes + versions go here - end - - depends_on "openssl@1.1" - depends_on "postgresql" - depends_on "python" - - {dependencies} - {trailer} - end - ''') - - @staticmethod - def _dbt_homebrew_trailer() -> str: - dedented = textwrap.dedent('''\ - def install - venv = virtualenv_create(libexec, "python3") - - res = resources.map(&:name).to_set - - res.each do |r| - venv.pip_install resource(r) - end - - venv.pip_install_and_link buildpath - - bin.install_symlink "#{libexec}/bin/dbt" => "dbt" - end - - test do - (testpath/"dbt_project.yml").write( - "{name: 'test', version: '0.0.1', profile: 'default'}", - ) - (testpath/".dbt/profiles.yml").write( - "{default: {outputs: {default: {type: 'postgres', threads: 1, - host: 'localhost', port: 5432, user: 'root', pass: 'password', - dbname: 'test', schema: 'test'}}, target: 'default'}}", - ) - (testpath/"models/test.sql").write("select * from test") - system "#{bin}/dbt", "test" - end''') - return textwrap.indent(dedented, ' ') - - def get_formula_data( - self, versioned: bool = True - ) -> str: - fmt = self._dbt_homebrew_formula_fmt() - trailer = self._dbt_homebrew_trailer() - if versioned: - formula_name = self.version.homebrew_class_name() - else: - formula_name = 'Dbt' - - dependencies_str = '\n'.join( - d.render() for d in self.template.dependencies - ) - - return fmt.format( - formula_name=formula_name, - version=self.version, - url_data=self.template.url_data, - hash_data=self.template.hash_data, - dependencies=dependencies_str, - trailer=trailer, - ) - - @property - def template(self) -> HomebrewTemplate: - if self._template is None: - self.make_venv() - print('done setting up virtualenv') - - dependencies = [] - dbt_package = None - for pkg in self._get_packages(): - if pkg.name == 'dbt': - if pkg.version != str(self.version): - raise ValueError( - f'Found an invalid dbt=={pkg.version}, ' - f'expected dbt=={self.version}' - ) - dbt_package = pkg - else: - # we can assume that anything starting with dbt- in a fresh - # venv is a dbt package, I hope - if pkg.name.startswith('dbt-'): - if pkg.version != str(self.version): - raise ValueError( - f'Found an invalid 
{pkg.name}=={pkg.version}, ' - f'expected {pkg.name}=={self.version}' - ) - dependencies.append(pkg) - if dbt_package is None: - raise RuntimeError( - 'never found dbt in "pip freeze -l" output' - ) - template = HomebrewTemplate( - url_data=dbt_package.url, - hash_data=dbt_package.sha256, - dependencies=dependencies, - ) - self._template = template - else: - template = self._template - return template - - def _get_pypi_info(self, pkg: str, version: str) -> Tuple[str, str]: - fp = urlopen(f'https://pypi.org/pypi/{pkg}/{version}/json') - try: - data = json.load(fp) - finally: - fp.close() - assert 'urls' in data - for pkginfo in data['urls']: - assert 'packagetype' in pkginfo - if pkginfo['packagetype'] == 'sdist': - assert 'url' in pkginfo - assert 'digests' in pkginfo - assert 'sha256' in pkginfo['digests'] - url = pkginfo['url'] - sha256 = pkginfo['digests']['sha256'] - return url, sha256 - raise ValueError(f'Never got a valid sdist for {pkg}=={version}') - - def _get_packages(self) -> Iterator[HomebrewDependency]: - pip = self.homebrew_venv_path / 'bin/pip' - cmd = [pip, 'freeze', '-l'] - raw = collect_output(cmd).split('\n') - for line in raw: - if not line: - continue - parts = line.split('==') - if len(parts) != 2: - raise ValueError( - f'Could not parse pip freeze output line: {line}' - ) - name, version = parts - url, sha256 = self._get_pypi_info(name, version) - dep = HomebrewDependency( - name=name, url=url, sha256=sha256, version=version - ) - yield dep - - def _remove_dbt_resource(self, lines: List[str]) -> Iterator[str]: - # TODO: fork poet or extract the good bits to avoid this - line_iter = iter(lines) - # don't do a double-newline or "brew audit" gets mad - for line in line_iter: - # skip the contents of the "dbt" resource block. - if line.strip() == 'resource "dbt" do': - for skip in line_iter: - if skip.strip() == 'end': - # skip the newline after 'end' - next(line_iter) - break - else: - yield line - - def create_versioned_formula_file(self): - formula_contents = self.get_formula_data(versioned=True) - if self.versioned_formula_path.exists(): - print('Homebrew formula path already exists, overwriting') - self.versioned_formula_path.write_text(formula_contents) - - def commit_versioned_formula(self): - # add a commit for the new formula - run_command( - ['git', 'add', self.versioned_formula_path], - cwd=self.homebrew_path - ) - run_command( - ['git', 'commit', '-m', f'add dbt@{self.version}'], - cwd=self.homebrew_path - ) - - def commit_default_formula(self): - run_command( - ['git', 'add', self.default_formula_path], - cwd=self.homebrew_path - ) - run_command( - ['git', 'commit', '-m', f'upgrade dbt to {self.version}'], - cwd=self.homebrew_path - ) - - @staticmethod - def run_tests(formula_path: Path, audit: bool = True): - path = os.path.normpath(formula_path) - run_command(['brew', 'uninstall', '--force', path]) - versions = [ - l.strip() for l in - collect_output(['brew', 'list']).split('\n') - if l.strip().startswith('dbt@') or l.strip() == 'dbt' - ] - if versions: - run_command(['brew', 'unlink'] + versions) - run_command(['brew', 'install', path]) - run_command(['brew', 'test', path]) - if audit: - run_command(['brew', 'audit', '--strict', path]) - - def create_default_package(self): - os.remove(self.default_formula_path) - formula_contents = self.get_formula_data(versioned=False) - self.default_formula_path.write_text(formula_contents) - - def build(self): - self.create_versioned_formula_file() - # self.run_tests(formula_path=self.versioned_formula_path) - 
self.commit_versioned_formula() - - if self.set_default: - self.create_default_package() - # self.run_tests(formula_path=self.default_formula_path, audit=False) - self.commit_default_formula() - - -class WheelInfo: - def __init__(self, path): - self.path = path - - @staticmethod - def _extract_distinfo_path(wfile: zipfile.ZipFile) -> zipfile.Path: - zpath = zipfile.Path(root=wfile) - for path in zpath.iterdir(): - if path.name.endswith('.dist-info'): - return path - raise ValueError('Wheel with no dist-info?') - - def get_metadata(self) -> Dict[str, str]: - with zipfile.ZipFile(self.path) as wf: - distinfo = self._extract_distinfo_path(wf) - metadata = distinfo / 'METADATA' - metadata_dict: Dict[str, str] = {} - for line in metadata.read_text().split('\n'): - parts = line.split(': ', 1) - if len(parts) == 2: - metadata_dict[parts[0]] = parts[1] - return metadata_dict - - def package_name(self) -> str: - metadata = self.get_metadata() - if 'Name' not in metadata: - raise ValueError('Wheel with no name?') - return metadata['Name'] - - -class DockerBuilder: - """The docker builder requires the existence of a dbt package""" - def __init__(self, dbt_path: Path, version: Version) -> None: - self.dbt_path = dbt_path - self.version = version - - @property - def docker_path(self) -> Path: - return self.dbt_path / 'docker' - - @property - def dockerfile_name(self) -> str: - return f'Dockerfile.{self.version}' - - @property - def dockerfile_path(self) -> Path: - return self.docker_path / self.dockerfile_name - - @property - def requirements_path(self) -> Path: - return self.docker_path / 'requirements' - - @property - def requirements_file_name(self) -> str: - return f'requirements.{self.version}.txt' - - @property - def dockerfile_venv_path(self) -> Path: - return self.dbt_path / 'build' / 'docker-venv' - - @property - def requirements_txt_path(self) -> Path: - return self.requirements_path / self.requirements_file_name - - def make_venv(self) -> DistFolderEnv: - env = DistFolderEnv(self.dbt_path) - - env.create(self.dockerfile_venv_path) - return env - - def get_frozen(self) -> str: - env = self.make_venv() - pip_path = self.dockerfile_venv_path / 'bin/pip' - cmd = [pip_path, 'freeze'] - wheel_names = { - WheelInfo(wheel_path).package_name() for wheel_path in env.wheels - } - # remove the dependencies in dbt itself - return '\n'.join([ - dep for dep in collect_output(cmd).split('\n') - if dep.split('==')[0] not in wheel_names - ]) - - def write_lockfile(self): - freeze = self.get_frozen() - path = self.requirements_txt_path - if path.exists(): - raise ValueError(f'Found existing requirements file at {path}!') - os.makedirs(path.parent, exist_ok=True) - path.write_text(freeze) - - def get_dockerfile_contents(self): - dist_path = (self.dbt_path / 'dist').relative_to(Path.cwd()) - wheel_paths = ' '.join( - os.path.join('.', 'dist', p.name) - for p in _require_wheels(self.dbt_path) - ) - - requirements_path = self.requirements_txt_path.relative_to(Path.cwd()) - - return textwrap.dedent( - f'''\ - FROM python:3.8.1-slim-buster - - RUN apt-get update && \ - apt-get dist-upgrade -y && \ - apt-get install -y --no-install-recommends \ - git software-properties-common make build-essential \ - ca-certificates libpq-dev && \ - apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - - COPY {requirements_path} ./{self.requirements_file_name} - COPY {dist_path} ./dist - RUN pip install --upgrade pip setuptools - RUN pip install --requirement ./{self.requirements_file_name} - RUN pip install {wheel_paths} 
- - RUN useradd -mU dbt_user - - ENV PYTHONIOENCODING=utf-8 - ENV LANG C.UTF-8 - - WORKDIR /usr/app - VOLUME /usr/app - - USER dbt_user - ENTRYPOINT dbt - ''' - ) - - def write_dockerfile(self): - dockerfile = self.get_dockerfile_contents() - path = self.dockerfile_path - if path.exists(): - raise ValueError(f'Found existing docker file at {path}!') - os.makedirs(path.parent, exist_ok=True) - path.write_text(dockerfile) - - @property - def image_tag(self): - return f'dbt:{self.version}' - - @property - def remote_tag(self): - return f'fishtownanalytics/{self.image_tag}' - - def create_docker_image(self): - run_command( - [ - 'docker', 'build', - '-f', self.dockerfile_path, - '--tag', self.image_tag, - # '--no-cache', - self.dbt_path, - ], - cwd=self.dbt_path - ) - - def set_remote_tag(self): - # tag it - run_command( - ['docker', 'tag', self.image_tag, self.remote_tag], - cwd=self.dbt_path, - ) - - def commit_docker_folder(self): - # commit the contents of docker/ - run_command( - ['git', 'add', 'docker'], - cwd=self.dbt_path - ) - commit_msg = f'Add {self.image_tag} dockerfiles and requirements' - run_command(['git', 'commit', '-m', commit_msg], cwd=self.dbt_path) - - def build( - self, - write_requirements: bool = True, - write_dockerfile: bool = True - ): - if write_requirements: - self.write_lockfile() - if write_dockerfile: - self.write_dockerfile() - self.commit_docker_folder() - self.create_docker_image() - self.set_remote_tag() - - def push(self): - run_command( - ['docker', 'push', self.remote_tag] - ) - - -def sanity_check(): - if sys.version_info[:len(HOMEBREW_PYTHON)] != HOMEBREW_PYTHON: - python_version_str = '.'.join(str(i) for i in HOMEBREW_PYTHON) - print(f'This script must be run with python {python_version_str}') - sys.exit(1) - - # avoid "what's a bdist_wheel" errors - try: - import wheel # type: ignore # noqa - except ImportError: - print( - 'The wheel package is required to build. 
Please run:\n' - 'pip install -r dev_requirements.txt' - ) - sys.exit(1) - - -def upgrade_to(args: Arguments): - if args.set_version: - set_version(args.path, args.version, args.part) - - builder = PypiBuilder(args.path) - if args.build_pypi: - builder.build() - - if args.upload_pypi: - if args.test_upload: - builder.upload() - input( - f'Ensure https://test.pypi.org/project/dbt/{args.version}/ ' - 'exists and looks reasonable' - ) - builder.upload(test=False) - - if args.build_homebrew: - if args.upload_pypi: - print('waiting a minute for pypi before trying to pip install') - # if we uploaded to pypi, wait a minute before we bother trying to - # pip install - time.sleep(60) - HomebrewBuilder( - dbt_path=args.path, - version=args.version, - homebrew_path=args.homebrew_path, - set_default=args.homebrew_set_default, - ).build() - - if args.build_docker: - builder = DockerBuilder( - dbt_path=args.path, - version=args.version, - ) - builder.build( - write_requirements=args.write_requirements, - write_dockerfile=args.write_dockerfile, - ) - if args.upload_docker: - builder.push() - - -def main(): - sanity_check() - args = Arguments.parse() - upgrade_to(args) - - -if __name__ == '__main__': - main() diff --git a/scripts/dtr.py b/scripts/dtr.py deleted file mode 100755 index 0920f90c743..00000000000 --- a/scripts/dtr.py +++ /dev/null @@ -1,369 +0,0 @@ -#!/usr/bin/env python3 -import argparse -import os -import shlex -import shutil -import subprocess -import sys - -# Python version defaults to 3.6 -# To run postgres integration tests: `dtr.py -i -t pg` (this is the default) -# To run postgres integration tests, clearing `dbt.log` beforehand: `dtr.py -il -t pg` -# dtr.py -i -t pg -a test/integration/029_docs_generate_tests -# To run postgres + redshift integration tests: `dtr.py -i -t pg -t rs` -# To drop to pdb on failure, add `--pdb` or `-p` -# To run mypy tests: `dtr.py -m`. -# To run flake8 test: `dtr.py -f`. 
-# To run unit tests: `dtr.py -u` -# To run rpc tests: `dtr.py -r` - -_SHORTHAND = { - 'p': 'postgres', - 'pg': 'postgres', - 'postgres': 'postgres', - 'pr': 'presto', - 'presto': 'presto', - 'r': 'redshift', - 'rs': 'redshift', - 'redshift': 'redshift', - 'b': 'bigquery', - 'bq': 'bigquery', - 'bigquery': 'bigquery', - 's': 'snowflake', - 'sf': 'snowflake', - 'snowflake': 'snowflake', -} - - -def type_convert(types: str): - result = set() - for t in types.split(','): - try: - result.add(_SHORTHAND[t]) - except KeyError: - raise ValueError( - 'value "{}" not allowed, must be one of [{}]' - .format(t, ','.join('"{}"'.format(k) for k in _SHORTHAND))) - return result - - -def parse_args(argv): - if not argv: - argv.extend(['-it', 'pg']) - parser = argparse.ArgumentParser() - parser.add_argument( - '-f', '--flake8', - help='run flake8', - dest='commands', - action='append_const', const=Flake8Builder - ) - parser.add_argument( - '-m', '--mypy', - help='Run mypy', - dest='commands', - action='append_const', const=MypyBuilder - ) - parser.add_argument( - '-u', '--unit', - help='run unit tests', - dest='commands', - action='append_const', const=UnitBuilder - ) - parser.add_argument( - '-i', '--integration', - help='run integration tests', - dest='commands', - action='append_const', const=IntegrationBuilder - ) - parser.add_argument( - '-r', '--rpc', - help='run rpc tests', - dest='commands', - action='append_const', const=RPCBuilder - ) - - parser.add_argument('-v', '--python-version', - default='38', choices=['36', '37', '38'], - help='what python version to run') - parser.add_argument( - '-t', '--types', - default=None, - help='The types of tests to run, if this is an integration run' - ) - parser.add_argument( - '-c', '--continue', - action='store_false', dest='stop', - help='If set, continue on failures' - ) - parser.add_argument( - '-l', '--remove-logs', - action='store_true', - help='remove dbt log files before running' - ) - - parser.add_argument( - '-1', '--single-threaded', - action='store_true', - help='Specify if the DBT_TEST_SINGLE_THREADED environment variable should be set' - ) - parser.add_argument( - '--coverage', - action='store_true', - help='Make a coverage report and print it to the terminal' - ) - parser.add_argument( - '-p', '--pdb', - action='store_true', - help='Drop into ipdb on failures, implies "--no-multi"' - ) - parser.add_argument( - '-k', - action='append', - nargs='?', - default=[], - help='Pass-through to pytest, test selector expression' - ) - parser.add_argument( - '--no-multi', - action='store_false', - dest='multi', - help='Turn off multiprocessing' - ) - - parser.add_argument( - '--docker-args', - action='append', - nargs='?', - default=[], - help='Specify docker-compose args') - parser.add_argument( - '--tox-args', - action='append', - nargs='?', - default=[], - help='Specify tox args') - parser.add_argument( - '--pylint-args', - action='append', - nargs='?', - default=[], - help='Specify pylint args') - parser.add_argument( - '-a', '--test-args', - action='append', - nargs='?', - default=[], - help='Specify integration test parameters, tacked on to the end' - ) - parser.add_argument( - '--unit-args', - action='append', - nargs='?', - default=[], - help='Specify unit test parameters, tacked on to the end' - ) - parser.add_argument( - '--flake8-args', - action='append', - nargs='?', - default=[], - help='Specify flake8 parameters, tacked on to the end' - ) - parser.add_argument( - '--mypy-args', - action='append', - nargs='?', - default=[], - help='Specify 
mypy parameters, tacked on to the end' - ) - parser.add_argument( - 'extra', - nargs='*', - default=[], - help='Any extra args that will apply to all pytest runs' - ) - parser.add_argument( - '--debug', - action='store_true', - ) - - parsed = parser.parse_args(argv) - if parsed.types: - parsed.types = type_convert(parsed.types) - else: - parsed.types = {'postgres', 'redshift', 'bigquery', 'snowflake'} - return parsed - - -class ArgBuilder(object): - - def __init__(self, parsed): - self.parsed = parsed - self.args = [] - self.add_test_environment_args() - - def add_extras(self): - raise NotImplementedError - - def add_container_args(self): - pass - - def run(self): - print('args={}'.format(self.args)) - result = subprocess.run(self.args) - result.check_returncode() - - def add_test_environment_args(self): - pass - - -class DockerBuilder(ArgBuilder): - def add_docker_args(self): - self.args = ['docker-compose', 'run', '--rm'] - if self.parsed.debug: - self.parsed.single_threaded = True - self.args.extend(('-e', 'DBT_MACRO_DEBUGGING=write')) - if self.parsed.single_threaded: - self.args.extend(('-e', 'DBT_TEST_SINGLE_THREADED=y')) - if self.parsed.docker_args: - self.args.extend(self.parsed.docker_args) - self.args.append('test') - - def add_test_environment_args(self): - super().add_test_environment_args() - self.add_docker_args() - - -class ToxBuilder(DockerBuilder): - def envname(self): - raise NotImplementedError('need an env name') - - def add_tox_args(self): - self.args.extend(['tox', '-e', self.envname()]) - if self.parsed.tox_args: - self.args.extend(self.parsed.tox_args) - self.args.append('--') - - def add_test_environment_args(self): - super().add_test_environment_args() - self.add_tox_args() - - -class PytestBuilder(ToxBuilder): - DEFAUlTS = None - - def envname(self): - return 'explicit-py{}'.format(self.parsed.python_version) - - def add_pytest_args(self): - assert self.DEFAUlTS is not None - self.args.append('-s') - if self.parsed.pdb: - self.args.extend(['--pdb', '--pdbcls=IPython.terminal.debugger:Pdb']) - self.parsed.multi = False - if self.parsed.stop: - self.args.append('-x') - if self.parsed.coverage: - self.args.extend(('--cov', 'dbt', '--cov-branch', '--cov-report', 'term')) - for arg in self.parsed.k: - self.args.extend(('-k', arg)) - if self.parsed.multi: - self.args.extend(('-n', 'auto')) - - if not self.add_extra_pytest_args(): - self.args.extend(self.DEFAUlTS) - - def add_extra_pytest_args(self): - raise NotImplementedError - - def add_test_environment_args(self): - super().add_test_environment_args() - self.add_pytest_args() - - -class IntegrationBuilder(PytestBuilder): - DEFAUlTS = ['test/integration'] - - def add_extra_pytest_args(self): - if self.parsed.types: - self.args.append('-m') - typestrs = ('profile_{}'.format(t) for t in self.parsed.types) - selector = ' or '.join(typestrs) - self.args.append(shlex.quote(selector)) - start = len(self.args) - self.args.extend(self.parsed.test_args) - self.args.extend(self.parsed.extra) - return len(self.args) - start > 0 - - -class RPCBuilder(PytestBuilder): - DEFAUlTS = ['test/rpc'] - - def add_extra_pytest_args(self): - start = len(self.args) - self.args.extend(self.parsed.test_args) - self.args.extend(self.parsed.extra) - return len(self.args) - start > 0 - - -class UnitBuilder(PytestBuilder): - DEFAUlTS = ['test/unit'] - - def add_extra_pytest_args(self): - start = len(self.args) - self.args.extend(self.parsed.unit_args) - self.args.extend(self.parsed.extra) - return len(self.args) - start > 0 - - -class 
Flake8Builder(ArgBuilder): - def add_test_environment_args(self): - self.args.extend(['flake8', '--select', 'E,W,F', '--ignore', 'W504']) - start = len(self.args) - self.args.extend(self.parsed.flake8_args) - if len(self.args) == start: - if os.path.exists('dbt/main.py'): - self.args.append('dbt') - elif os.path.exists('core/dbt/main.py'): - self.args.append('core/dbt') - for adapter in ('postgres', 'redshift', 'bigquery', 'snowflake'): - self.args.append('plugins/{}/dbt'.format(adapter)) - - -class MypyBuilder(ToxBuilder): - def envname(self): - return 'mypy-dev' - - def run(self): - # The cache is a big source of false errors - if os.path.exists('./.mypy_cache'): - shutil.rmtree('./.mypy_cache') - return super().run() - - def add_test_environment_args(self): - super().add_test_environment_args() - self.args.extend(self.parsed.mypy_args) - - -def main(argv=None): - if argv is None: - argv = sys.argv[1:] - parsed = parse_args(argv) - print('args={}'.format(parsed)) - if parsed.remove_logs: - path = 'logs/dbt.log' - if os.path.exists(path): - os.remove(path) - - try: - for cls in parsed.commands: - builder = cls(parsed) - builder.run() - except subprocess.CalledProcessError: - print('failed!') - sys.exit(1) - print('success!') - - -if __name__ == '__main__': - main() diff --git a/test/integration/015_cli_invocation_tests/test_cli_invocation.py b/test/integration/015_cli_invocation_tests/test_cli_invocation.py index e298adc1d68..eaa44f3608f 100644 --- a/test/integration/015_cli_invocation_tests/test_cli_invocation.py +++ b/test/integration/015_cli_invocation_tests/test_cli_invocation.py @@ -149,7 +149,7 @@ def test_postgres_dbt_commands_with_cwd_as_project_dir(self): @use_profile('postgres') def test_postgres_dbt_commands_with_randomdir_as_project_dir(self): - workdir = os.getcwd() + workdir = self.test_root_dir with tempfile.TemporaryDirectory() as tmpdir: os.chdir(tmpdir) self._run_simple_dbt_commands(workdir) @@ -157,7 +157,7 @@ def test_postgres_dbt_commands_with_randomdir_as_project_dir(self): @use_profile('postgres') def test_postgres_dbt_commands_with_relative_dir_as_project_dir(self): - workdir = os.getcwd() + workdir = self.test_root_dir with tempfile.TemporaryDirectory() as tmpdir: os.chdir(tmpdir) self._run_simple_dbt_commands(os.path.relpath(workdir, tmpdir)) diff --git a/test/integration/100_rpc_test/test_rpc.py b/test/integration/100_rpc_test/test_rpc.py index 0517e9e7484..9487c8bb739 100644 --- a/test/integration/100_rpc_test/test_rpc.py +++ b/test/integration/100_rpc_test/test_rpc.py @@ -392,8 +392,8 @@ def make_many_requests(self, num_requests): return stored -@mark.flaky(rerun_filter=addr_in_use) class TestRPCServerCompileRun(HasRPCServer): + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_compile_sql_postgres(self): trivial = self.async_query( @@ -478,6 +478,7 @@ def test_compile_sql_postgres(self): compiled_sql=_select_from_ephemeral ) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_run_sql_postgres(self): # seed + run dbt to make models before using them! 
@@ -601,7 +602,7 @@ def test_run_sql_postgres(self): table={'column_names': ['id'], 'rows': [[1.0]]} ) - @mark.flaky(rerun_filter=None) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_ps_kill_postgres(self): task_tags = { @@ -691,7 +692,7 @@ def test_ps_kill_postgres(self): self.assertGreater(rowdict[1]['elapsed'], 0) self.assertIsNone(rowdict[1]['tags']) - @mark.flaky(rerun_filter=lambda *a, **kw: True) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_ps_kill_longwait_postgres(self): request_token, request_id = self.get_sleep_query() @@ -704,6 +705,7 @@ def test_ps_kill_longwait_postgres(self): result_data = self.kill_and_assert(request_token, request_id) self.assertTrue(len(result_data['logs']) > 0) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_invalid_requests_postgres(self): # invalid method -> error on the initial query @@ -759,6 +761,7 @@ def test_invalid_requests_postgres(self): self.assertIn('logs', error_data) self.assertTrue(len(error_data['logs']) > 0) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_timeout_postgres(self): data = self.async_query( @@ -781,7 +784,6 @@ def test_timeout_postgres(self): return -@mark.flaky(rerun_filter=addr_in_use) class TestRPCServerProjects(HasRPCServer): def assertHasResults(self, result, expected, *, missing=None, num_expected=None): dct = self.assertIsResult(result) @@ -819,6 +821,7 @@ def assertHasTestResults(self, results, expected, pass_results=None): self.assertEqual(passes, pass_results) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_seed_project_postgres(self): # testing "dbt seed" is tricky so we'll just jam some sql in there @@ -830,6 +833,7 @@ def test_seed_project_postgres(self): result = self.async_query('seed', show=False).json() self.correct_seed_result(result) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_seed_project_cli_postgres(self): self.run_sql_file("seed.sql") @@ -839,6 +843,7 @@ def test_seed_project_cli_postgres(self): result = self.async_query('cli_args', cli='seed').json() self.correct_seed_result(result) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_compile_project_postgres(self): @@ -859,6 +864,7 @@ def test_compile_project_postgres(self): num_expected=6, ) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_compile_project_cli_postgres(self): self.run_dbt_with_vars(['compile']) @@ -879,6 +885,7 @@ def test_compile_project_cli_postgres(self): num_expected=6, ) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_run_project_postgres(self): result = self.async_query('run').json() @@ -887,6 +894,7 @@ def test_run_project_postgres(self): result, {'descendant_model', 'multi_source_model', 'nonsource_descendant'}) self.assertTablesEqual('multi_source_model', 'expected_multi_source') + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_run_project_cli_postgres(self): result = self.async_query('cli_args', cli='run').json() @@ -894,6 +902,7 @@ def test_run_project_cli_postgres(self): result, {'descendant_model', 'multi_source_model', 'nonsource_descendant'}) self.assertTablesEqual('multi_source_model', 'expected_multi_source') + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_test_project_postgres(self): 
self.run_dbt_with_vars(['run']) @@ -902,6 +911,7 @@ def test_test_project_postgres(self): self.assertIn('results', result) self.assertHasTestResults(result['results'], 4) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_test_project_cli_postgres(self): self.run_dbt_with_vars(['run']) @@ -910,6 +920,7 @@ def test_test_project_cli_postgres(self): self.assertIn('results', result) self.assertHasTestResults(result['results'], 4) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) def assertManifestExists(self, nodes_length, sources_length): self.assertTrue(os.path.exists('target/manifest.json')) with open('target/manifest.json') as fp: @@ -919,6 +930,7 @@ def assertManifestExists(self, nodes_length, sources_length): self.assertIn('sources', manifest) self.assertEqual(len(manifest['sources']), sources_length) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) def assertHasDocsGenerated(self, result, expected): dct = self.assertIsResult(result) self.assertIn('state', dct) @@ -930,11 +942,13 @@ def assertHasDocsGenerated(self, result, expected): sources = dct['sources'] self.assertEqual(set(sources), expected['sources']) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) def assertCatalogExists(self): self.assertTrue(os.path.exists('target/catalog.json')) with open('target/catalog.json') as fp: catalog = json.load(fp) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) def _correct_docs_generate_result(self, result): expected = { 'nodes': { @@ -956,6 +970,7 @@ def _correct_docs_generate_result(self, result): self.assertCatalogExists() self.assertManifestExists(12, 5) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_docs_generate_postgres(self): self.run_dbt_with_vars(['run']) @@ -965,6 +980,7 @@ def test_docs_generate_postgres(self): result = self.async_query('docs.generate').json() self._correct_docs_generate_result(result) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_docs_generate_postgres_cli(self): self.run_dbt_with_vars(['run']) @@ -974,6 +990,7 @@ def test_docs_generate_postgres_cli(self): result = self.async_query('cli_args', cli='docs generate').json() self._correct_docs_generate_result(result) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_deps_postgres(self): self.async_query('deps').json() @@ -984,10 +1001,8 @@ def test_deps_postgres_cli(self): self.async_query('cli_args', cli='deps').json() -@mark.flaky(rerun_filter=addr_in_use) class TestRPCTaskManagement(HasRPCServer): - - @mark.flaky(rerun_filter=lambda *a, **kw: True) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_sighup_postgres(self): status = self.assertIsResult(self.query('status').json()) @@ -1035,6 +1050,7 @@ def test_sighup_postgres(self): self.assertRunning([alive]) self.kill_and_assert(*alive) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_gc_by_time_postgres(self): # make a few normal requests @@ -1055,6 +1071,7 @@ def test_gc_by_time_postgres(self): result = self.assertIsResult(resp) self.assertEqual(len(result['rows']), 0) + @mark.flaky(rerun_filter=addr_in_use, max_runs=3) @use_profile('postgres') def test_gc_by_id_postgres(self): # make 10 requests, then gc half of them @@ -1095,7 +1112,6 @@ def _compare_result(self, result): return result['result']['state'] in ('error', 'ready') -@mark.flaky(rerun_filter=addr_in_use) class TestRPCServerDeps(HasRPCServer): ServerProcess = 
CompletingServerProcess
 should_seed = False
@@ -1112,7 +1128,7 @@ def tearDown(self):
 super().tearDown()

 @property
- def packages_config(selF):
+ def packages_config(self):
 return {
 # this is config-version 2, but with no upper bound
 'packages': [
@@ -1143,6 +1159,7 @@ def _check_deps_ok(self, status):
 self.assertEqual(len(os.listdir('./dbt_modules')), 1)
 self.assertIsResult(self.async_query('compile').json())

+ @mark.flaky(rerun_filter=addr_in_use, max_runs=3)
 @use_profile('postgres')
 def test_deps_compilation_postgres(self):
 status = self._check_start_predeps()
diff --git a/test/integration/base.py b/test/integration/base.py
index d9355b50d69..5a6cba3ee3b 100644
--- a/test/integration/base.py
+++ b/test/integration/base.py
@@ -141,9 +141,12 @@ class DBTIntegrationTest(unittest.TestCase):

 @property
 def database_host(self):
- if os.name == 'nt':
+ if sys.platform == 'win32':
 return 'localhost'
- return 'database'
+ elif sys.platform == 'darwin':
+ return 'localhost'
+ else:
+ return 'database'

 def postgres_profile(self):
 return {
diff --git a/test/rpc/conftest.py b/test/rpc/conftest.py
index 624d6a98c6e..b9675130ce0 100644
--- a/test/rpc/conftest.py
+++ b/test/rpc/conftest.py
@@ -1,4 +1,5 @@
 import os
+import sys
 import pytest
 import random
 import time
@@ -57,8 +58,13 @@ def profiles_root(tmpdir):
 def project_root(tmpdir):
 return tmpdir.mkdir('project')

-
 def postgres_profile_data(unique_schema):
+ database = 'database'
+ if sys.platform == 'win32':
+ database = 'localhost'
+ elif sys.platform == 'darwin':
+ database = 'localhost'
+
 return {
 'config': {
 'send_anonymous_usage_stats': False
@@ -68,7 +74,7 @@ def postgres_profile_data(unique_schema):
 'default': {
 'type': 'postgres',
 'threads': 4,
- 'host': 'database',
+ 'host': database,
 'port': 5432,
 'user': 'root',
 'pass': 'password',
@@ -78,7 +84,7 @@
 'other_schema': {
 'type': 'postgres',
 'threads': 4,
- 'host': 'database',
+ 'host': database,
 'port': 5432,
 'user': 'root',
 'pass': 'password',
diff --git a/test/setup.sh b/test/setup.sh
deleted file mode 100755
index 557805beae6..00000000000
--- a/test/setup.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-. 
/usr/local/bin/virtualenvwrapper.sh -mkdir -p ~/.virtualenv -mkvirtualenv dbt - -cd /usr/src/app - -pip install -r requirements.txt -pip install -r dev_requirements.txt diff --git a/test/setup_db.sh b/test/setup_db.sh index c39269e1fcd..b52c3c487d8 100644 --- a/test/setup_db.sh +++ b/test/setup_db.sh @@ -6,7 +6,6 @@ env | grep '^PG' # If you want to run this script for your own postgresql (run with # docker-compose) it will look like this: # PGHOST=127.0.0.1 PGUSER=root PGPASSWORD=password PGDATABASE=postgres \ -# bash test/setup.sh PGUSER="${PGUSER:-postgres}" export PGUSER PGPORT="${PGPORT:-5432}" diff --git a/tox.ini b/tox.ini index c25f9a1e739..858ef850c7d 100644 --- a/tox.ini +++ b/tox.ini @@ -1,388 +1,58 @@ [tox] skipsdist = True -envlist = unit-py36, integration-postgres-py36, integration-redshift-py36, integration-snowflake-py36, flake8, integration-bigquery-py36, mypy -requires = tox-pip-version +envlist = py36,py37,py38,flake8,mypy - -[testenv:flake8] -basepython = python3.6 -commands = /bin/bash -c '$(which flake8) --select=E,W,F --ignore=W504,E741 core/dbt plugins/*/dbt' -deps = - -r ./dev_requirements.txt - -[testenv:mypy] -basepython = python3.6 -commands = /bin/bash -c '$(which mypy) core/dbt' -pip_version = pip -deps = - -r ./requirements.txt - -r ./dev_requirements.txt - - -[testenv:mypy-dev] -basepython = python3.6 -commands = /bin/bash -c '$(which mypy) core/dbt' -deps = - -r ./editable_requirements.txt - -r ./dev_requirements.txt - - -[testenv:unit-py36] -basepython = python3.6 -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 test/unit' -deps = - -r ./requirements.txt - -r ./dev_requirements.txt - - -[testenv:integration-postgres-py36] -basepython = python3.6 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_postgres {posargs} -n4 test/integration/*' - /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 --profile=postgres test/rpc' -deps = - ./core - ./plugins/postgres - -r ./dev_requirements.txt - - -[testenv:integration-snowflake-py36] -basepython = python3.6 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_snowflake {posargs} -n4 test/integration/*' - /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 --profile=snowflake test/rpc' -deps = - ./core - ./plugins/snowflake - -r ./dev_requirements.txt - -[testenv:integration-bigquery-py36] -basepython = python3.6 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_bigquery {posargs} -n4 test/integration/*' -deps = - ./core - ./plugins/bigquery - -r ./dev_requirements.txt - -[testenv:integration-redshift-py36] -basepython = python3.6 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_redshift {posargs} -n4 test/integration/*' -deps = - ./core - ./plugins/postgres - ./plugins/redshift - -r ./dev_requirements.txt - -[testenv:integration-presto-py36] -basepython = python3.6 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_presto {posargs} -n4 test/integration/*' -deps = - ./core - ./plugins/presto - -r ./dev_requirements.txt - -[testenv:explicit-py36] -basepython = python3.6 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs}' -deps = - -r ./editable_requirements.txt - -r 
./dev_requirements.txt - -[testenv:unit-py37] -basepython = python3.7 -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 test/unit' +[testenv] +skip_install = true +commands = {envpython} -m pytest {posargs} test/unit deps = - -r ./requirements.txt - -r ./dev_requirements.txt + -rdev-requirements.txt + -rrequirements.txt -[testenv:integration-postgres-py37] -basepython = python3.7 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_postgres {posargs} -n4 test/integration/*' && \ - /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 --profile=postgres test/rpc' -deps = - ./core - ./plugins/postgres - -r ./dev_requirements.txt - -[testenv:integration-snowflake-py37] -basepython = python3.7 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_snowflake {posargs} -n4 test/integration/*' - /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 --profile=snowflake test/rpc' -deps = - ./core - ./plugins/snowflake - -r ./dev_requirements.txt - -[testenv:integration-bigquery-py37] -basepython = python3.7 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_bigquery {posargs} -n4 test/integration/*' -deps = - ./core - ./plugins/bigquery - -r ./dev_requirements.txt - -[testenv:integration-redshift-py37] -basepython = python3.7 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_redshift {posargs} -n4 test/integration/*' -deps = - ./core - ./plugins/postgres - ./plugins/redshift - -r ./dev_requirements.txt - -[testenv:integration-presto-py37] -basepython = python3.7 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_presto {posargs} -n4 test/integration/*' -deps = - ./core - ./plugins/presto - -r ./dev_requirements.txt - - -[testenv:explicit-py37] -basepython = python3.7 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs}' -deps = - -r ./editable_requirements.txt - -r ./dev_requirements.txt - -[testenv:unit-py38] -basepython = python3.8 -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 test/unit' -deps = - -r ./requirements.txt - -r ./dev_requirements.txt - - -[testenv:integration-postgres-py38] -basepython = python3.8 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_postgres {posargs} -n4 test/integration/*' - /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 --profile=postgres test/rpc' -deps = - ./core - ./plugins/postgres - -r ./dev_requirements.txt - -[testenv:integration-snowflake-py38] -basepython = python3.8 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_snowflake {posargs} -n4 test/integration/*' - /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 --profile=snowflake test/rpc' -deps = - ./core - ./plugins/snowflake - -r ./dev_requirements.txt - -[testenv:integration-bigquery-py38] -basepython = python3.8 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_bigquery {posargs} -n4 test/integration/*' -deps = - ./core - ./plugins/bigquery - -r ./dev_requirements.txt - -[testenv:integration-redshift-py38] 
+[testenv:flake8] basepython = python3.8 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_redshift {posargs} -n4 test/integration/*' +skip_install = true +commands = flake8 --select=E,W,F --ignore=W504,E741 \ + core/dbt \ + plugins/bigquery/dbt \ + plugins/snowflake/dbt \ + plugins/postgres/dbt \ + plugins/redshift/dbt deps = - ./core - ./plugins/postgres - ./plugins/redshift - -r ./dev_requirements.txt + -rdev-requirements.txt - -[testenv:explicit-py38] +[testenv:mypy] basepython = python3.8 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs}' -deps = - -r ./editable_requirements.txt - -r ./dev_requirements.txt - -[testenv:unit-py39] -basepython = python3.9 -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 test/unit' -deps = - -r ./requirements.txt - -r ./dev_requirements.txt - - -[testenv:integration-postgres-py39] -basepython = python3.9 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_postgres {posargs} -n4 test/integration/*' - /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 --profile=postgres test/rpc' -deps = - ./core - ./plugins/postgres - -r ./dev_requirements.txt - -[testenv:integration-snowflake-py39] -basepython = python3.9 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_snowflake {posargs} -n4 test/integration/*' - /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs} -n4 --profile=snowflake test/rpc' -deps = - ./core - ./plugins/snowflake - -r ./dev_requirements.txt - -[testenv:integration-bigquery-py39] -basepython = python3.9 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_bigquery {posargs} -n4 test/integration/*' -deps = - ./core - ./plugins/bigquery - -r ./dev_requirements.txt - -[testenv:integration-redshift-py39] -basepython = python3.9 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v -m profile_redshift {posargs} -n4 test/integration/*' -deps = - ./core - ./plugins/postgres - ./plugins/redshift - -r ./dev_requirements.txt - - -[testenv:explicit-py39] -basepython = python3.9 -passenv = * -setenv = - HOME=/home/tox -commands = /bin/bash -c '{envpython} -m pytest --durations 0 -v {posargs}' -deps = - -r ./editable_requirements.txt - -r ./dev_requirements.txt - -[testenv:pywin] -basepython = {env:PYTHON:}\python.exe -passenv = * -setenv = - DBT_CONFIG_DIR = ~/.dbt - DBT_INVOCATION_ENV = ci-appveyor -commands = pytest --durations 0 -v -m 'profile_postgres or profile_snowflake or profile_bigquery or profile_redshift' -n4 test/integration test/unit -deps = - -r ./requirements.txt - -r ./dev_requirements.txt - -[testenv:pywin-unit] -basepython = python.exe -passenv = * -setenv = - DBT_CONFIG_DIR = ~/.dbt - DBT_INVOCATION_ENV = ci-appveyor -commands = python -m pytest --durations 0 -v {posargs} -n4 test/unit -deps = - -r ./requirements.txt - -r ./dev_requirements.txt - - -[testenv:pywin-postgres] -basepython = python.exe -passenv = * -setenv = - DBT_CONFIG_DIR = ~/.dbt - DBT_INVOCATION_ENV = ci-appveyor -commands = python -m pytest --durations 0 -v -m profile_postgres {posargs} -n4 test/integration -deps = - ./core - ./plugins/postgres - -r ./dev_requirements.txt - - -[testenv:pywin-snowflake] -basepython = python.exe -passenv = * -setenv 
= - DBT_CONFIG_DIR = ~/.dbt - DBT_INVOCATION_ENV = ci-appveyor -commands = python -m pytest --durations 0 -v -m profile_snowflake {posargs} -n4 test/integration -deps = - ./core - ./plugins/snowflake - -r ./dev_requirements.txt - - -[testenv:pywin-bigquery] -basepython = python.exe -passenv = * -setenv = - DBT_CONFIG_DIR = ~/.dbt - DBT_INVOCATION_ENV = ci-appveyor -commands = python -m pytest --durations 0 -v -m profile_bigquery {posargs} -n4 test/integration -deps = - ./core - ./plugins/bigquery - -r ./dev_requirements.txt - - -[testenv:pywin-redshift] -basepython = python.exe -passenv = * -setenv = - DBT_CONFIG_DIR = ~/.dbt - DBT_INVOCATION_ENV = ci-appveyor -commands = python -m pytest --durations 0 -v -m profile_redshift {posargs} -n4 test/integration -deps = - ./core - ./plugins/postgres - ./plugins/redshift - -r ./dev_requirements.txt +skip_install = true +commands = mypy core/dbt +deps = + -rdev-requirements.txt + -rrequirements.txt + +[testenv:py{36,37,38,39,}-{postgres,redshift,snowflake,bigquery}] +skip_install = true +passenv = * +commands = + postgres: {envpython} -m pytest {posargs} -m profile_postgres test/integration + postgres: {envpython} -m pytest {posargs} --profile=postgres test/rpc + redshift: {envpython} -m pytest {posargs} -m profile_redshift test/integration + redshift: {envpython} -m pytest {posargs} --profile=redshift test/rpc + snowflake: {envpython} -m pytest {posargs} -m profile_snowflake test/integration + snowflake: {envpython} -m pytest {posargs} --profile=snowflake test/rpc + bigquery: {envpython} -m pytest {posargs} -m profile_bigquery test/integration + bigquery: {envpython} -m pytest {posargs} --profile=bigquery test/rpc +deps = + -rdev-requirements.txt + ./core + postgres: ./plugins/postgres + redshift: ./plugins/redshift + snowflake: ./plugins/snowflake + bigquery: ./plugins/bigquery + +[pytest] +env_files = + test.env +testpaths = + test/unit + test/integration + test/rpc
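
For local development, the consolidated tox configuration above can be driven directly. The commands below are an illustrative sketch, not part of the change itself: they assume tox is installed, that a Postgres database (e.g. the docker-compose one) is reachable for the integration environments, and that pytest-xdist is available from dev-requirements.txt for the -n flag.

  # default envlist: unit tests on py36/py37/py38 plus flake8 and mypy
  tox

  # one interpreter/adapter combination, with extra flags passed through to pytest via posargs
  tox -e py38-postgres -- -v -n4

  # linting and type checks only
  tox -e flake8,mypy

Integration environments follow the py<version>-<adapter> factor naming defined above, so any other combination (for example py36-redshift) can be selected the same way.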